diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 67976d58188e2..1ba3ee562317a 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -48,4 +48,5 @@ BWC_VERSION:
- "2.1.0"
- "2.1.1"
- "2.2.0"
+ - "2.2.1"
- "2.3.0"
diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md
index ce84d9658a808..8c2a6b4889122 100644
--- a/DEVELOPER_GUIDE.md
+++ b/DEVELOPER_GUIDE.md
@@ -4,7 +4,8 @@
- [Install Prerequisites](#install-prerequisites)
- [JDK 11](#jdk-11)
- [JDK 14](#jdk-14)
- - [Runtime JDK](#runtime-jdk)
+ - [JDK 17](#jdk-17)
+ - [Custom Runtime JDK](#custom-runtime-jdk)
- [Windows](#windows)
- [Docker](#docker)
- [Build](#build)
@@ -12,6 +13,7 @@
- [Run OpenSearch](#run-opensearch)
- [Use an Editor](#use-an-editor)
- [IntelliJ IDEA](#intellij-idea)
+ - [Remote development using JetBrains Gateway](#remote-development-using-jetbrains-gateway)
- [Visual Studio Code](#visual-studio-code)
- [Eclipse](#eclipse)
- [Project Layout](#project-layout)
@@ -35,6 +37,7 @@
- [testImplementation](#testimplementation)
- [Gradle Plugins](#gradle-plugins)
- [Distribution Download Plugin](#distribution-download-plugin)
+ - [Creating fat-JAR of a Module](#creating-fat-jar-of-a-module)
- [Misc](#misc)
- [git-secrets](#git-secrets)
- [Installation](#installation)
@@ -49,7 +52,7 @@
- [Submitting Changes](#submitting-changes)
- [Backports](#backports)
- [LineLint](#linelint)
- - [Lucene Snapshots](#lucene-snapshots)
+- [Lucene Snapshots](#lucene-snapshots)
# Developer Guide
@@ -374,6 +377,42 @@ The Distribution Download plugin downloads the latest version of OpenSearch by d
./gradlew integTest -PcustomDistributionUrl="https://ci.opensearch.org/ci/dbc/bundle-build/1.2.0/1127/linux/x64/dist/opensearch-1.2.0-linux-x64.tar.gz"
```
+### Creating fat-JAR of a Module
+
+A fat-JAR (or uber-JAR) is a JAR that contains the classes from all the libraries on which your project depends, along with the classes of the project itself.
+
+There might be cases where a developer would like to add custom logic to the code of a module (or multiple modules) and generate a fat-JAR that can be used directly by a dependency management tool. For example, in [#3665](https://github.com/opensearch-project/OpenSearch/pull/3665) a developer wanted to provide a tentative patch, as a fat-JAR, to a consumer for changes made in the high-level REST client.
+
+Use the [Gradle Shadow plugin](https://imperceptiblethoughts.com/shadow/).
+Add the following to the `build.gradle` file of the module for which you want to create the fat-JAR, e.g. `client/rest-high-level/build.gradle`:
+
+```
+apply plugin: 'com.github.johnrengelman.shadow'
+```
+
+Run the `shadowJar` task:
+```
+./gradlew :client:rest-high-level:shadowJar
+```
+
+This will generate a fat-JAR in the `build/distributions` folder of the module, e.g. `./client/rest-high-level/build/distributions/opensearch-rest-high-level-client-1.4.0-SNAPSHOT.jar`.
+
+You can further customize the fat-JAR by configuring the plugin, as sketched below. More information about the Shadow plugin can be found [here](https://imperceptiblethoughts.com/shadow/).
+
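+For example, a minimal sketch of such a customization (the classifier and relocated package below are illustrative assumptions, not part of this change):
+
+```
+shadowJar {
+    // Hypothetical example: tag the artifact with a classifier and relocate a
+    // bundled dependency package to avoid classpath conflicts in consumers.
+    archiveClassifier.set('uber')
+    relocate 'org.apache.http', 'shadow.org.apache.http'
+}
+```
+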
+To use the generated JAR, install the JAR locally, e.g.
+```
+mvn install:install-file -Dfile=src/main/resources/opensearch-rest-high-level-client-1.4.0-SNAPSHOT.jar -DgroupId=org.opensearch.client -DartifactId=opensearch-rest-high-level-client -Dversion=1.4.0-SNAPSHOT -Dpackaging=jar -DgeneratePom=true
+```
+
+Refer to the installed JAR as you would any other Maven artifact, e.g.
+
+```
+<dependency>
+    <groupId>org.opensearch.client</groupId>
+    <artifactId>opensearch-rest-high-level-client</artifactId>
+    <version>1.4.0-SNAPSHOT</version>
+</dependency>
+```
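+
+If the consuming project uses Gradle instead of Maven, the equivalent would be (assuming `mavenLocal()` is registered as a repository):
+
+```
+repositories {
+    mavenLocal()
+}
+
+dependencies {
+    implementation 'org.opensearch.client:opensearch-rest-high-level-client:1.4.0-SNAPSHOT'
+}
+```
+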
## Misc
diff --git a/build.gradle b/build.gradle
index e0bb961ce14c2..ce5ea6cdd7e11 100644
--- a/build.gradle
+++ b/build.gradle
@@ -55,7 +55,7 @@ plugins {
id 'lifecycle-base'
id 'opensearch.docker-support'
id 'opensearch.global-build-info'
- id "com.diffplug.spotless" version "6.9.0" apply false
+ id "com.diffplug.spotless" version "6.9.1" apply false
id "org.gradle.test-retry" version "1.4.0" apply false
id "test-report-aggregation"
id 'jacoco-report-aggregation'
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 8e9911294977a..4af1acfed0ab2 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,5 +1,5 @@
opensearch = 3.0.0
-lucene = 9.3.0
+lucene = 9.4.0-snapshot-ddf0d0a
bundled_jdk_vendor = adoptium
bundled_jdk = 17.0.4+8
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java
index c4901f7e03dbf..0ec2935e71492 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java
@@ -112,10 +112,6 @@
import org.opensearch.search.aggregations.bucket.filter.FiltersAggregationBuilder;
import org.opensearch.search.aggregations.bucket.filter.ParsedFilter;
import org.opensearch.search.aggregations.bucket.filter.ParsedFilters;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
-import org.opensearch.search.aggregations.bucket.geogrid.ParsedGeoHashGrid;
-import org.opensearch.search.aggregations.bucket.geogrid.ParsedGeoTileGrid;
import org.opensearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.opensearch.search.aggregations.bucket.global.ParsedGlobal;
import org.opensearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder;
@@ -161,7 +157,6 @@
import org.opensearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.opensearch.search.aggregations.metrics.CardinalityAggregationBuilder;
import org.opensearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder;
-import org.opensearch.search.aggregations.metrics.GeoBoundsAggregationBuilder;
import org.opensearch.search.aggregations.metrics.GeoCentroidAggregationBuilder;
import org.opensearch.search.aggregations.metrics.InternalHDRPercentileRanks;
import org.opensearch.search.aggregations.metrics.InternalHDRPercentiles;
@@ -173,7 +168,6 @@
import org.opensearch.search.aggregations.metrics.ParsedAvg;
import org.opensearch.search.aggregations.metrics.ParsedCardinality;
import org.opensearch.search.aggregations.metrics.ParsedExtendedStats;
-import org.opensearch.search.aggregations.metrics.ParsedGeoBounds;
import org.opensearch.search.aggregations.metrics.ParsedGeoCentroid;
import org.opensearch.search.aggregations.metrics.ParsedHDRPercentileRanks;
import org.opensearch.search.aggregations.metrics.ParsedHDRPercentiles;
@@ -2240,7 +2234,6 @@ static List<NamedXContentRegistry.Entry> getDefaultNamedXContents() {
map.put(StatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedStatsBucket.fromXContent(p, (String) c));
map.put(ExtendedStatsAggregationBuilder.NAME, (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c));
map.put(ExtendedStatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedExtendedStatsBucket.fromXContent(p, (String) c));
- map.put(GeoBoundsAggregationBuilder.NAME, (p, c) -> ParsedGeoBounds.fromXContent(p, (String) c));
map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c));
map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c));
map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c));
@@ -2257,8 +2250,6 @@ static List<NamedXContentRegistry.Entry> getDefaultNamedXContents() {
map.put(GlobalAggregationBuilder.NAME, (p, c) -> ParsedGlobal.fromXContent(p, (String) c));
map.put(FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c));
map.put(InternalSampler.PARSER_NAME, (p, c) -> ParsedSampler.fromXContent(p, (String) c));
- map.put(GeoHashGridAggregationBuilder.NAME, (p, c) -> ParsedGeoHashGrid.fromXContent(p, (String) c));
- map.put(GeoTileGridAggregationBuilder.NAME, (p, c) -> ParsedGeoTileGrid.fromXContent(p, (String) c));
map.put(RangeAggregationBuilder.NAME, (p, c) -> ParsedRange.fromXContent(p, (String) c));
map.put(DateRangeAggregationBuilder.NAME, (p, c) -> ParsedDateRange.fromXContent(p, (String) c));
map.put(GeoDistanceAggregationBuilder.NAME, (p, c) -> ParsedGeoDistance.fromXContent(p, (String) c));
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 24c164f0f1e12..58e9a16f424db 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -11,7 +11,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionSha256Sum=97a52d145762adc241bad7fd18289bf7f6801e08ece6badf80402fe2b9f250b1
+distributionSha256Sum=db9c8211ed63f61f60292c69e80d89196f9eb36665e369e7f00ac4cc841c2219
diff --git a/modules/geo/build.gradle b/modules/geo/build.gradle
index 1146b6924b2f8..7f687a414e566 100644
--- a/modules/geo/build.gradle
+++ b/modules/geo/build.gradle
@@ -28,18 +28,25 @@
* under the License.
*/
apply plugin: 'opensearch.yaml-rest-test'
+apply plugin: 'opensearch.internal-cluster-test'
opensearchplugin {
- description 'Placeholder plugin for geospatial features in OpenSearch. only registers geo_shape field mapper for now'
- description 'Plugin for geospatial features in OpenSearch. Registers the geo_shape field mapper and the GeoBounds aggregation on geo_shape and geo_point fields'
classname 'org.opensearch.geo.GeoModulePlugin'
}
restResources {
restApi {
- includeCore '_common', 'indices', 'index', 'search'
+ includeCore '_common', 'indices', 'index', 'search', 'bulk'
}
}
artifacts {
restTests(project.file('src/yamlRestTest/resources/rest-api-spec/test'))
}
-test.enabled = false
+/**
+ * These compiler arguments need to be removed, as raw types are used in the GeoGrid and GeoTile aggregations.
+ */
+tasks.withType(JavaCompile).configureEach {
+ options.compilerArgs -= '-Xlint:rawtypes'
+ options.compilerArgs -= '-Xlint:unchecked'
+}
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java
new file mode 100644
index 0000000000000..7dc6f2c1b89b7
--- /dev/null
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java
@@ -0,0 +1,47 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo;
+
+import org.opensearch.index.mapper.GeoShapeFieldMapper;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.TestGeoShapeFieldMapperPlugin;
+
+import java.util.Collection;
+import java.util.Collections;
+
+/**
+ * This is the base class for all Geo-related integration tests. Use this class to add features and settings
+ * for the test cluster on which the integration tests run.
+ */
+public abstract class GeoModulePluginIntegTestCase extends OpenSearchIntegTestCase {
+ /**
+ * Returns a collection of plugins that should be loaded on each node for the integration tests. As this
+ * geo plugin is not packaged in a zip, we need to load it before the tests run.
+ *
+ * @return List of {@link Plugin}
+ */
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return Collections.singletonList(GeoModulePlugin.class);
+ }
+
+ /**
+ * This was added as a backdoor to mock the implementation of {@link GeoShapeFieldMapper}, which comes from
+ * {@link GeoModulePlugin}. The mock implementation is {@link TestGeoShapeFieldMapperPlugin}. Now that we are using
+ * {@link GeoModulePlugin} in our integration tests, we need to override this functionality to avoid a duplicate
+ * mapper error.
+ *
+ * @return boolean
+ */
+ @Override
+ protected boolean addMockGeoShapeFieldMapper() {
+ return false;
+ }
+}
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
new file mode 100644
index 0000000000000..2ac73728b2dab
--- /dev/null
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
@@ -0,0 +1,59 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.search;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.geo.GeoModulePluginIntegTestCase;
+import org.opensearch.geo.search.aggregations.metrics.GeoBounds;
+import org.opensearch.geo.tests.common.AggregationBuilders;
+import org.opensearch.test.OpenSearchIntegTestCase;
+
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+import static org.hamcrest.Matchers.closeTo;
+
+@OpenSearchIntegTestCase.SuiteScopeTestCase
+public class MissingValueIT extends GeoModulePluginIntegTestCase {
+
+ @Override
+ protected void setupSuiteScopeCluster() throws Exception {
+ assertAcked(prepareCreate("idx").setMapping("date", "type=date", "location", "type=geo_point", "str", "type=keyword").get());
+ indexRandom(
+ true,
+ client().prepareIndex("idx").setId("1").setSource(),
+ client().prepareIndex("idx")
+ .setId("2")
+ .setSource("str", "foo", "long", 3L, "double", 5.5, "date", "2015-05-07", "location", "1,2")
+ );
+ }
+
+ public void testUnmappedGeoBounds() {
+ SearchResponse response = client().prepareSearch("idx")
+ .addAggregation(AggregationBuilders.geoBounds("bounds").field("non_existing_field").missing("2,1"))
+ .get();
+ assertSearchResponse(response);
+ GeoBounds bounds = response.getAggregations().get("bounds");
+ assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5));
+ assertThat(bounds.bottomRight().lon(), closeTo(1.0, 1E-5));
+ assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5));
+ assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5));
+ }
+
+ public void testGeoBounds() {
+ SearchResponse response = client().prepareSearch("idx")
+ .addAggregation(AggregationBuilders.geoBounds("bounds").field("location").missing("2,1"))
+ .get();
+ assertSearchResponse(response);
+ GeoBounds bounds = response.getAggregations().get("bounds");
+ assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5));
+ assertThat(bounds.bottomRight().lon(), closeTo(2.0, 1E-5));
+ assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5));
+ assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5));
+ }
+}
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoHashGridIT.java
similarity index 89%
rename from server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java
rename to modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoHashGridIT.java
index 56d918feef9d8..6ab7dd5254679 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/GeoHashGridIT.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket;
+package org.opensearch.geo.search.aggregations.bucket;
import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.ObjectIntMap;
@@ -41,12 +41,12 @@
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentBuilder;
+import org.opensearch.geo.GeoModulePluginIntegTestCase;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGrid;
+import org.opensearch.geo.tests.common.AggregationBuilders;
import org.opensearch.index.query.GeoBoundingBoxQueryBuilder;
-import org.opensearch.search.aggregations.AggregationBuilders;
import org.opensearch.search.aggregations.InternalAggregation;
import org.opensearch.search.aggregations.bucket.filter.Filter;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoGrid;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoGrid.Bucket;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.VersionUtils;
@@ -57,17 +57,16 @@
import java.util.Random;
import java.util.Set;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.opensearch.geometry.utils.Geohash.PRECISION;
import static org.opensearch.geometry.utils.Geohash.stringEncode;
-import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.opensearch.search.aggregations.AggregationBuilders.geohashGrid;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
@OpenSearchIntegTestCase.SuiteScopeTestCase
-public class GeoHashGridIT extends OpenSearchIntegTestCase {
+public class GeoHashGridIT extends GeoModulePluginIntegTestCase {
@Override
protected boolean forbidPrivateIndexSettings() {
@@ -158,13 +157,13 @@ public void setupSuiteScopeCluster() throws Exception {
public void testSimple() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
+ .addAggregation(AggregationBuilders.geohashGrid("geohashgrid").field("location").precision(precision))
.get();
assertSearchResponse(response);
GeoGrid geoGrid = response.getAggregations().get("geohashgrid");
- List<? extends Bucket> buckets = geoGrid.getBuckets();
+ List<? extends GeoGrid.Bucket> buckets = geoGrid.getBuckets();
Object[] propertiesKeys = (Object[]) ((InternalAggregation) geoGrid).getProperty("_key");
Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) geoGrid).getProperty("_count");
for (int i = 0; i < buckets.size(); i++) {
@@ -185,7 +184,7 @@ public void testSimple() throws Exception {
public void testMultivalued() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("multi_valued_idx")
- .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
+ .addAggregation(AggregationBuilders.geohashGrid("geohashgrid").field("location").precision(precision))
.get();
assertSearchResponse(response);
@@ -208,8 +207,8 @@ public void testFiltered() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
- AggregationBuilders.filter("filtered", bbox)
- .subAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
+ org.opensearch.search.aggregations.AggregationBuilders.filter("filtered", bbox)
+ .subAggregation(AggregationBuilders.geohashGrid("geohashgrid").field("location").precision(precision))
)
.get();
@@ -233,7 +232,7 @@ public void testFiltered() throws Exception {
public void testUnmapped() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx_unmapped")
- .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
+ .addAggregation(AggregationBuilders.geohashGrid("geohashgrid").field("location").precision(precision))
.get();
assertSearchResponse(response);
@@ -247,7 +246,7 @@ public void testUnmapped() throws Exception {
public void testPartiallyUnmapped() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
- .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
+ .addAggregation(AggregationBuilders.geohashGrid("geohashgrid").field("location").precision(precision))
.get();
assertSearchResponse(response);
@@ -267,7 +266,9 @@ public void testPartiallyUnmapped() throws Exception {
public void testTopMatch() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(geohashGrid("geohashgrid").field("location").size(1).shardSize(100).precision(precision))
+ .addAggregation(
+ AggregationBuilders.geohashGrid("geohashgrid").field("location").size(1).shardSize(100).precision(precision)
+ )
.get();
assertSearchResponse(response);
@@ -296,7 +297,7 @@ public void testSizeIsZero() {
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> client().prepareSearch("idx")
- .addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize))
+ .addAggregation(AggregationBuilders.geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize))
.get()
);
assertThat(exception.getMessage(), containsString("[size] must be greater than 0. Found [0] in [geohashgrid]"));
@@ -308,7 +309,7 @@ public void testShardSizeIsZero() {
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> client().prepareSearch("idx")
- .addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize))
+ .addAggregation(AggregationBuilders.geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize))
.get()
);
assertThat(exception.getMessage(), containsString("[shardSize] must be greater than 0. Found [0] in [geohashgrid]"));
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/ShardReduceIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/ShardReduceIT.java
new file mode 100644
index 0000000000000..5b4dd052a2f65
--- /dev/null
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/bucket/ShardReduceIT.java
@@ -0,0 +1,107 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.search.aggregations.bucket;
+
+import org.opensearch.action.index.IndexRequestBuilder;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.geo.GeoModulePluginIntegTestCase;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGrid;
+import org.opensearch.geo.tests.common.AggregationBuilders;
+import org.opensearch.geometry.utils.Geohash;
+import org.opensearch.index.query.QueryBuilders;
+import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval;
+import org.opensearch.search.aggregations.bucket.histogram.Histogram;
+import org.opensearch.test.OpenSearchIntegTestCase;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.opensearch.search.aggregations.AggregationBuilders.dateHistogram;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+
+/**
+ * Tests making sure that the reduce is propagated to all aggregations in the hierarchy when executing on a single shard.
+ * These tests are based on the date histogram in combination with min_doc_count=0. In order for the date histogram to
+ * compute empty buckets, its {@code reduce()} method must be called. So by adding the date histogram under other buckets,
+ * we can make sure that the reduce is properly propagated by checking that empty buckets were created.
+ */
+@OpenSearchIntegTestCase.SuiteScopeTestCase
+public class ShardReduceIT extends GeoModulePluginIntegTestCase {
+
+ private IndexRequestBuilder indexDoc(String date, int value) throws Exception {
+ return client().prepareIndex("idx")
+ .setSource(
+ jsonBuilder().startObject()
+ .field("value", value)
+ .field("ip", "10.0.0." + value)
+ .field("location", Geohash.stringEncode(5, 52, Geohash.PRECISION))
+ .field("date", date)
+ .field("term-l", 1)
+ .field("term-d", 1.5)
+ .field("term-s", "term")
+ .startObject("nested")
+ .field("date", date)
+ .endObject()
+ .endObject()
+ );
+ }
+
+ @Override
+ public void setupSuiteScopeCluster() throws Exception {
+ assertAcked(
+ prepareCreate("idx").setMapping(
+ "nested",
+ "type=nested",
+ "ip",
+ "type=ip",
+ "location",
+ "type=geo_point",
+ "term-s",
+ "type=keyword"
+ )
+ );
+
+ indexRandom(true, indexDoc("2014-01-01", 1), indexDoc("2014-01-02", 2), indexDoc("2014-01-04", 3));
+ ensureSearchable();
+ }
+
+ public void testGeoHashGrid() throws Exception {
+ SearchResponse response = client().prepareSearch("idx")
+ .setQuery(QueryBuilders.matchAllQuery())
+ .addAggregation(
+ AggregationBuilders.geohashGrid("grid")
+ .field("location")
+ .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0))
+ )
+ .get();
+
+ assertSearchResponse(response);
+
+ GeoGrid grid = response.getAggregations().get("grid");
+ Histogram histo = grid.getBuckets().iterator().next().getAggregations().get("histo");
+ assertThat(histo.getBuckets().size(), equalTo(4));
+ }
+
+ public void testGeoTileGrid() throws Exception {
+ SearchResponse response = client().prepareSearch("idx")
+ .setQuery(QueryBuilders.matchAllQuery())
+ .addAggregation(
+ AggregationBuilders.geotileGrid("grid")
+ .field("location")
+ .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0))
+ )
+ .get();
+
+ assertSearchResponse(response);
+
+ GeoGrid grid = response.getAggregations().get("grid");
+ Histogram histo = grid.getBuckets().iterator().next().getAggregations().get("histo");
+ assertThat(histo.getBuckets().size(), equalTo(4));
+ }
+}
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java
new file mode 100644
index 0000000000000..92987d407f51d
--- /dev/null
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java
@@ -0,0 +1,295 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.search.aggregations.metrics;
+
+import com.carrotsearch.hppc.ObjectIntHashMap;
+import com.carrotsearch.hppc.ObjectIntMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
+import com.carrotsearch.hppc.ObjectObjectMap;
+import org.opensearch.action.index.IndexRequestBuilder;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.common.Strings;
+import org.opensearch.common.document.DocumentField;
+import org.opensearch.common.geo.GeoPoint;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.xcontent.ToXContent;
+import org.opensearch.common.xcontent.XContentBuilder;
+import org.opensearch.common.xcontent.XContentFactory;
+import org.opensearch.geo.GeoModulePluginIntegTestCase;
+import org.opensearch.geo.tests.common.RandomGeoGenerator;
+import org.opensearch.geometry.utils.Geohash;
+import org.opensearch.search.SearchHit;
+import org.opensearch.search.sort.SortBuilders;
+import org.opensearch.search.sort.SortOrder;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+
+/**
+ * This is the base class for all Geo aggregation integration tests. This class is similar to what we have in the server
+ * folder of the OpenSearch repo. As part of moving the Geo-based aggregations into a separate module and plugin, we need
+ * to copy the code, as we cannot depend on that class.
+ * GitHub issue
+ */
+public abstract class AbstractGeoAggregatorModulePluginTestCase extends GeoModulePluginIntegTestCase {
+
+ protected static final String SINGLE_VALUED_FIELD_NAME = "geo_value";
+ protected static final String MULTI_VALUED_FIELD_NAME = "geo_values";
+ protected static final String NUMBER_FIELD_NAME = "l_values";
+ protected static final String UNMAPPED_IDX_NAME = "idx_unmapped";
+ protected static final String IDX_NAME = "idx";
+ protected static final String EMPTY_IDX_NAME = "empty_idx";
+ protected static final String DATELINE_IDX_NAME = "dateline_idx";
+ protected static final String HIGH_CARD_IDX_NAME = "high_card_idx";
+ protected static final String IDX_ZERO_NAME = "idx_zero";
+
+ protected static int numDocs;
+ protected static int numUniqueGeoPoints;
+ protected static GeoPoint[] singleValues, multiValues;
+ protected static GeoPoint singleTopLeft, singleBottomRight, multiTopLeft, multiBottomRight, singleCentroid, multiCentroid,
+ unmappedCentroid;
+ protected static ObjectIntMap<String> expectedDocCountsForGeoHash = null;
+ protected static ObjectObjectMap<String, GeoPoint> expectedCentroidsForGeoHash = null;
+ protected static final double GEOHASH_TOLERANCE = 1E-5D;
+
+ @Override
+ public void setupSuiteScopeCluster() throws Exception {
+ createIndex(UNMAPPED_IDX_NAME);
+ assertAcked(
+ prepareCreate(IDX_NAME).setMapping(
+ SINGLE_VALUED_FIELD_NAME,
+ "type=geo_point",
+ MULTI_VALUED_FIELD_NAME,
+ "type=geo_point",
+ NUMBER_FIELD_NAME,
+ "type=long",
+ "tag",
+ "type=keyword"
+ )
+ );
+
+ singleTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
+ singleBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
+ multiTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
+ multiBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
+ singleCentroid = new GeoPoint(0, 0);
+ multiCentroid = new GeoPoint(0, 0);
+ unmappedCentroid = new GeoPoint(0, 0);
+
+ numDocs = randomIntBetween(6, 20);
+ numUniqueGeoPoints = randomIntBetween(1, numDocs);
+ expectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
+ expectedCentroidsForGeoHash = new ObjectObjectHashMap<>(numDocs * 2);
+
+ singleValues = new GeoPoint[numUniqueGeoPoints];
+ for (int i = 0; i < singleValues.length; i++) {
+ singleValues[i] = RandomGeoGenerator.randomPoint(random());
+ updateBoundsTopLeft(singleValues[i], singleTopLeft);
+ updateBoundsBottomRight(singleValues[i], singleBottomRight);
+ }
+
+ multiValues = new GeoPoint[numUniqueGeoPoints];
+ for (int i = 0; i < multiValues.length; i++) {
+ multiValues[i] = RandomGeoGenerator.randomPoint(random());
+ updateBoundsTopLeft(multiValues[i], multiTopLeft);
+ updateBoundsBottomRight(multiValues[i], multiBottomRight);
+ }
+
+ List<IndexRequestBuilder> builders = new ArrayList<>();
+
+ GeoPoint singleVal;
+ final GeoPoint[] multiVal = new GeoPoint[2];
+ double newMVLat, newMVLon;
+ for (int i = 0; i < numDocs; i++) {
+ singleVal = singleValues[i % numUniqueGeoPoints];
+ multiVal[0] = multiValues[i % numUniqueGeoPoints];
+ multiVal[1] = multiValues[(i + 1) % numUniqueGeoPoints];
+ builders.add(
+ client().prepareIndex(IDX_NAME)
+ .setSource(
+ jsonBuilder().startObject()
+ .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat())
+ .startArray(MULTI_VALUED_FIELD_NAME)
+ .startArray()
+ .value(multiVal[0].lon())
+ .value(multiVal[0].lat())
+ .endArray()
+ .startArray()
+ .value(multiVal[1].lon())
+ .value(multiVal[1].lat())
+ .endArray()
+ .endArray()
+ .field(NUMBER_FIELD_NAME, i)
+ .field("tag", "tag" + i)
+ .endObject()
+ )
+ );
+ singleCentroid = singleCentroid.reset(
+ singleCentroid.lat() + (singleVal.lat() - singleCentroid.lat()) / (i + 1),
+ singleCentroid.lon() + (singleVal.lon() - singleCentroid.lon()) / (i + 1)
+ );
+ newMVLat = (multiVal[0].lat() + multiVal[1].lat()) / 2d;
+ newMVLon = (multiVal[0].lon() + multiVal[1].lon()) / 2d;
+ multiCentroid = multiCentroid.reset(
+ multiCentroid.lat() + (newMVLat - multiCentroid.lat()) / (i + 1),
+ multiCentroid.lon() + (newMVLon - multiCentroid.lon()) / (i + 1)
+ );
+ }
+
+ assertAcked(prepareCreate(EMPTY_IDX_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point"));
+
+ assertAcked(
+ prepareCreate(DATELINE_IDX_NAME).setMapping(
+ SINGLE_VALUED_FIELD_NAME,
+ "type=geo_point",
+ MULTI_VALUED_FIELD_NAME,
+ "type=geo_point",
+ NUMBER_FIELD_NAME,
+ "type=long",
+ "tag",
+ "type=keyword"
+ )
+ );
+
+ GeoPoint[] geoValues = new GeoPoint[5];
+ geoValues[0] = new GeoPoint(38, 178);
+ geoValues[1] = new GeoPoint(12, -179);
+ geoValues[2] = new GeoPoint(-24, 170);
+ geoValues[3] = new GeoPoint(32, -175);
+ geoValues[4] = new GeoPoint(-11, 178);
+
+ for (int i = 0; i < 5; i++) {
+ builders.add(
+ client().prepareIndex(DATELINE_IDX_NAME)
+ .setSource(
+ jsonBuilder().startObject()
+ .array(SINGLE_VALUED_FIELD_NAME, geoValues[i].lon(), geoValues[i].lat())
+ .field(NUMBER_FIELD_NAME, i)
+ .field("tag", "tag" + i)
+ .endObject()
+ )
+ );
+ }
+ assertAcked(
+ prepareCreate(HIGH_CARD_IDX_NAME).setSettings(Settings.builder().put("number_of_shards", 2))
+ .setMapping(
+ SINGLE_VALUED_FIELD_NAME,
+ "type=geo_point",
+ MULTI_VALUED_FIELD_NAME,
+ "type=geo_point",
+ NUMBER_FIELD_NAME,
+ "type=long,store=true",
+ "tag",
+ "type=keyword"
+ )
+ );
+
+ for (int i = 0; i < 2000; i++) {
+ singleVal = singleValues[i % numUniqueGeoPoints];
+ builders.add(
+ client().prepareIndex(HIGH_CARD_IDX_NAME)
+ .setSource(
+ jsonBuilder().startObject()
+ .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat())
+ .startArray(MULTI_VALUED_FIELD_NAME)
+ .startArray()
+ .value(multiValues[i % numUniqueGeoPoints].lon())
+ .value(multiValues[i % numUniqueGeoPoints].lat())
+ .endArray()
+ .startArray()
+ .value(multiValues[(i + 1) % numUniqueGeoPoints].lon())
+ .value(multiValues[(i + 1) % numUniqueGeoPoints].lat())
+ .endArray()
+ .endArray()
+ .field(NUMBER_FIELD_NAME, i)
+ .field("tag", "tag" + i)
+ .endObject()
+ )
+ );
+ updateGeohashBucketsCentroid(singleVal);
+ }
+
+ builders.add(
+ client().prepareIndex(IDX_ZERO_NAME)
+ .setSource(jsonBuilder().startObject().array(SINGLE_VALUED_FIELD_NAME, 0.0, 1.0).endObject())
+ );
+ assertAcked(prepareCreate(IDX_ZERO_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point"));
+
+ indexRandom(true, builders);
+ ensureSearchable();
+
+ // Added to debug a test failure where the terms aggregation seems to be reporting two documents with the same
+ // value for NUMBER_FIELD_NAME. This will check that after random indexing each document only has 1 value for
+ // NUMBER_FIELD_NAME and it is the correct value. Following this initial change it seems that this call was getting
+ // more than 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type.
+ SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME)
+ .addStoredField(NUMBER_FIELD_NAME)
+ .addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME).order(SortOrder.ASC))
+ .setSize(5000)
+ .get();
+ assertSearchResponse(response);
+ long totalHits = response.getHits().getTotalHits().value;
+ XContentBuilder builder = XContentFactory.jsonBuilder();
+ response.toXContent(builder, ToXContent.EMPTY_PARAMS);
+ logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder));
+ for (int i = 0; i < totalHits; i++) {
+ SearchHit searchHit = response.getHits().getAt(i);
+ assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx"));
+ DocumentField hitField = searchHit.field(NUMBER_FIELD_NAME);
+
+ assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1));
+ Long value = hitField.getValue();
+ assertThat("Hit " + i + " has wrong value", value.intValue(), equalTo(i));
+ }
+ assertThat(totalHits, equalTo(2000L));
+ }
+
+ private void updateGeohashBucketsCentroid(final GeoPoint location) {
+ String hash = Geohash.stringEncode(location.lon(), location.lat(), Geohash.PRECISION);
+ for (int precision = Geohash.PRECISION; precision > 0; --precision) {
+ final String h = hash.substring(0, precision);
+ expectedDocCountsForGeoHash.put(h, expectedDocCountsForGeoHash.getOrDefault(h, 0) + 1);
+ expectedCentroidsForGeoHash.put(h, updateHashCentroid(h, location));
+ }
+ }
+
+ private GeoPoint updateHashCentroid(String hash, final GeoPoint location) {
+ GeoPoint centroid = expectedCentroidsForGeoHash.getOrDefault(hash, null);
+ if (centroid == null) {
+ return new GeoPoint(location.lat(), location.lon());
+ }
+ final int docCount = expectedDocCountsForGeoHash.get(hash);
+ final double newLon = centroid.lon() + (location.lon() - centroid.lon()) / docCount;
+ final double newLat = centroid.lat() + (location.lat() - centroid.lat()) / docCount;
+ return centroid.reset(newLat, newLon);
+ }
+
+ private void updateBoundsBottomRight(GeoPoint geoPoint, GeoPoint currentBound) {
+ if (geoPoint.lat() < currentBound.lat()) {
+ currentBound.resetLat(geoPoint.lat());
+ }
+ if (geoPoint.lon() > currentBound.lon()) {
+ currentBound.resetLon(geoPoint.lon());
+ }
+ }
+
+ private void updateBoundsTopLeft(GeoPoint geoPoint, GeoPoint currentBound) {
+ if (geoPoint.lat() > currentBound.lat()) {
+ currentBound.resetLat(geoPoint.lat());
+ }
+ if (geoPoint.lon() < currentBound.lon()) {
+ currentBound.resetLon(geoPoint.lon());
+ }
+ }
+}
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoBoundsIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java
similarity index 97%
rename from server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoBoundsIT.java
rename to modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java
index 3af3b9e5212f8..8cc82da12d69a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoBoundsIT.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.geo.GeoPoint;
@@ -43,21 +43,21 @@
import java.util.List;
-import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
-import static org.opensearch.search.aggregations.AggregationBuilders.geoBounds;
-import static org.opensearch.search.aggregations.AggregationBuilders.global;
-import static org.opensearch.search.aggregations.AggregationBuilders.terms;
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.allOf;
-import static org.hamcrest.Matchers.closeTo;
-import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.closeTo;
+import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
+import static org.opensearch.search.aggregations.AggregationBuilders.global;
+import static org.opensearch.search.aggregations.AggregationBuilders.terms;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+import static org.opensearch.geo.tests.common.AggregationBuilders.geoBounds;
@OpenSearchIntegTestCase.SuiteScopeTestCase
-public class GeoBoundsIT extends AbstractGeoTestCase {
+public class GeoBoundsITTestCase extends AbstractGeoAggregatorModulePluginTestCase {
private static final String aggName = "geoBounds";
public void testSingleValuedField() throws Exception {
@@ -226,7 +226,8 @@ public void testSingleValuedFieldNearDateLineWrapLongitude() throws Exception {
}
/**
- * This test forces the {@link GeoBoundsAggregator} to resize the {@link BigArray}s it uses to ensure they are resized correctly
+ * This test forces the {@link GeoBoundsAggregator} to resize the {@link BigArray}s it uses to ensure they are
+ * resized correctly
*/
public void testSingleValuedFieldAsSubAggToHighCardTermsAgg() {
SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME)
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoCentroidITTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoCentroidITTestCase.java
new file mode 100644
index 0000000000000..e6d45e27b8f70
--- /dev/null
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoCentroidITTestCase.java
@@ -0,0 +1,84 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.geo.search.aggregations.metrics;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.common.geo.GeoPoint;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGrid;
+import org.opensearch.geo.tests.common.AggregationBuilders;
+import org.opensearch.search.aggregations.metrics.GeoCentroid;
+import org.opensearch.test.OpenSearchIntegTestCase;
+
+import java.util.List;
+
+import static org.hamcrest.Matchers.closeTo;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.opensearch.search.aggregations.AggregationBuilders.geoCentroid;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+
+@OpenSearchIntegTestCase.SuiteScopeTestCase
+public class GeoCentroidITTestCase extends AbstractGeoAggregatorModulePluginTestCase {
+ private static final String aggName = "geoCentroid";
+
+ public void testSingleValueFieldAsSubAggToGeohashGrid() throws Exception {
+ SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME)
+ .addAggregation(
+ AggregationBuilders.geohashGrid("geoGrid")
+ .field(SINGLE_VALUED_FIELD_NAME)
+ .subAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME))
+ )
+ .get();
+ assertSearchResponse(response);
+
+ GeoGrid grid = response.getAggregations().get("geoGrid");
+ assertThat(grid, notNullValue());
+ assertThat(grid.getName(), equalTo("geoGrid"));
+ List<? extends GeoGrid.Bucket> buckets = grid.getBuckets();
+ for (GeoGrid.Bucket cell : buckets) {
+ String geohash = cell.getKeyAsString();
+ GeoPoint expectedCentroid = expectedCentroidsForGeoHash.get(geohash);
+ GeoCentroid centroidAgg = cell.getAggregations().get(aggName);
+ assertThat(
+ "Geohash " + geohash + " has wrong centroid latitude ",
+ expectedCentroid.lat(),
+ closeTo(centroidAgg.centroid().lat(), GEOHASH_TOLERANCE)
+ );
+ assertThat(
+ "Geohash " + geohash + " has wrong centroid longitude",
+ expectedCentroid.lon(),
+ closeTo(centroidAgg.centroid().lon(), GEOHASH_TOLERANCE)
+ );
+ }
+ }
+}
diff --git a/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java b/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java
index ed8c7c06fb3e8..25dcf8db2c407 100644
--- a/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java
@@ -32,18 +32,74 @@
package org.opensearch.geo;
+import org.opensearch.geo.search.aggregations.bucket.composite.GeoTileGridValuesSourceBuilder;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregator;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoHashGrid;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoTileGrid;
+import org.opensearch.geo.search.aggregations.metrics.GeoBounds;
+import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder;
+import org.opensearch.geo.search.aggregations.metrics.InternalGeoBounds;
import org.opensearch.index.mapper.GeoShapeFieldMapper;
import org.opensearch.index.mapper.Mapper;
import org.opensearch.plugins.MapperPlugin;
import org.opensearch.plugins.Plugin;
+import org.opensearch.plugins.SearchPlugin;
+import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation;
import java.util.Collections;
+import java.util.List;
import java.util.Map;
-public class GeoModulePlugin extends Plugin implements MapperPlugin {
+public class GeoModulePlugin extends Plugin implements MapperPlugin, SearchPlugin {
@Override
public Map<String, Mapper.TypeParser> getMappers() {
return Collections.singletonMap(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser());
}
+
+ /**
+ * Registering {@link GeoBounds} aggregation on GeoPoint field.
+ */
+ @Override
+ public List<AggregationSpec> getAggregations() {
+ final AggregationSpec geoBounds = new AggregationSpec(
+ GeoBoundsAggregationBuilder.NAME,
+ GeoBoundsAggregationBuilder::new,
+ GeoBoundsAggregationBuilder.PARSER
+ ).addResultReader(InternalGeoBounds::new).setAggregatorRegistrar(GeoBoundsAggregationBuilder::registerAggregators);
+
+ final AggregationSpec geoHashGrid = new AggregationSpec(
+ GeoHashGridAggregationBuilder.NAME,
+ GeoHashGridAggregationBuilder::new,
+ GeoHashGridAggregationBuilder.PARSER
+ ).addResultReader(InternalGeoHashGrid::new).setAggregatorRegistrar(GeoHashGridAggregationBuilder::registerAggregators);
+
+ final AggregationSpec geoTileGrid = new AggregationSpec(
+ GeoTileGridAggregationBuilder.NAME,
+ GeoTileGridAggregationBuilder::new,
+ GeoTileGridAggregationBuilder.PARSER
+ ).addResultReader(InternalGeoTileGrid::new).setAggregatorRegistrar(GeoTileGridAggregationBuilder::registerAggregators);
+ return List.of(geoBounds, geoHashGrid, geoTileGrid);
+ }
+
+ /**
+ * Registering the {@link GeoTileGridAggregator} in the {@link CompositeAggregation}.
+ *
+ * @return a {@link List} of {@link CompositeAggregationSpec}
+ */
+ @Override
+ public List<CompositeAggregationSpec> getCompositeAggregations() {
+ return Collections.singletonList(
+ new CompositeAggregationSpec(
+ GeoTileGridValuesSourceBuilder::register,
+ GeoTileGridValuesSourceBuilder.class,
+ GeoTileGridValuesSourceBuilder.COMPOSITE_AGGREGATION_SERIALISATION_BYTE_CODE,
+ GeoTileGridValuesSourceBuilder::new,
+ GeoTileGridValuesSourceBuilder::parse,
+ GeoTileGridValuesSourceBuilder.TYPE
+ )
+ );
+ }
}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java
similarity index 87%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java
index 4b01a08d29a43..84d5943da287f 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.composite;
+package org.opensearch.geo.search.aggregations.bucket.composite;
import org.apache.lucene.index.IndexReader;
import org.opensearch.LegacyESVersion;
@@ -43,12 +43,15 @@
import org.opensearch.common.xcontent.ObjectParser;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentParser;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.CellIdSource;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.query.QueryShardContext;
import org.opensearch.search.DocValueFormat;
-import org.opensearch.search.aggregations.bucket.geogrid.CellIdSource;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils;
+import org.opensearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
+import org.opensearch.search.aggregations.bucket.composite.CompositeValuesSourceConfig;
+import org.opensearch.search.aggregations.bucket.composite.CompositeValuesSourceParserHelper;
+import org.opensearch.search.aggregations.bucket.GeoTileUtils;
import org.opensearch.search.aggregations.bucket.missing.MissingOrder;
import org.opensearch.search.aggregations.support.CoreValuesSourceType;
import org.opensearch.search.aggregations.support.ValuesSource;
@@ -88,13 +91,19 @@ CompositeValuesSourceConfig apply(
);
}
- static final String TYPE = "geotile_grid";
+ public static final String TYPE = "geotile_grid";
+ /*
+ Use the TYPE parameter instead of the byte code. The byte code is added for backward compatibility and will be
+ removed in the next version.
+ */
+ @Deprecated
+ public static final Byte COMPOSITE_AGGREGATION_SERIALISATION_BYTE_CODE = 3;
static final ValuesSourceRegistry.RegistryKey<GeoTileCompositeSuppier> REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(
TYPE,
GeoTileCompositeSuppier.class
);
- private static final ObjectParser<GeoTileGridValuesSourceBuilder, Void> PARSER;
+ static final ObjectParser<GeoTileGridValuesSourceBuilder, Void> PARSER;
static {
PARSER = new ObjectParser<>(GeoTileGridValuesSourceBuilder.TYPE);
PARSER.declareInt(GeoTileGridValuesSourceBuilder::precision, new ParseField("precision"));
@@ -106,11 +115,11 @@ CompositeValuesSourceConfig apply(
CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER);
}
- static GeoTileGridValuesSourceBuilder parse(String name, XContentParser parser) throws IOException {
+ public static GeoTileGridValuesSourceBuilder parse(String name, XContentParser parser) throws IOException {
return PARSER.parse(parser, new GeoTileGridValuesSourceBuilder(name), null);
}
- static void register(ValuesSourceRegistry.Builder builder) {
+ public static void register(ValuesSourceRegistry.Builder builder) {
builder.register(
REGISTRY_KEY,
@@ -163,7 +172,7 @@ static void register(ValuesSourceRegistry.Builder builder) {
super(name);
}
- GeoTileGridValuesSourceBuilder(StreamInput in) throws IOException {
+ public GeoTileGridValuesSourceBuilder(StreamInput in) throws IOException {
super(in);
this.precision = in.readInt();
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
@@ -203,7 +212,7 @@ protected void doXContentBody(XContentBuilder builder, Params params) throws IOE
}
@Override
- String type() {
+ protected String type() {
return TYPE;
}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/GeoTileValuesSource.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileValuesSource.java
similarity index 88%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/composite/GeoTileValuesSource.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileValuesSource.java
index 819dfc573bbe4..303e577e99e7b 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/GeoTileValuesSource.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileValuesSource.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.composite;
+package org.opensearch.geo.search.aggregations.bucket.composite;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
@@ -38,7 +38,9 @@
import org.opensearch.common.util.BigArrays;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.search.DocValueFormat;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils;
+import org.opensearch.search.aggregations.bucket.composite.LongValuesSource;
+import org.opensearch.search.aggregations.bucket.composite.SingleDimensionValuesSource;
+import org.opensearch.search.aggregations.bucket.GeoTileUtils;
import org.opensearch.search.aggregations.bucket.missing.MissingOrder;
import java.io.IOException;
@@ -68,7 +70,7 @@ class GeoTileValuesSource extends LongValuesSource {
}
@Override
- void setAfter(Comparable value) {
+ protected void setAfter(Comparable value) {
if (missingBucket && value == null) {
afterValue = null;
} else if (value instanceof Number) {
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/BoundedCellValues.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BoundedCellValues.java
similarity index 97%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/BoundedCellValues.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BoundedCellValues.java
index ba824fc8f21dd..06d2dcaee3932 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/BoundedCellValues.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BoundedCellValues.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.index.fielddata.MultiGeoPointValues;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/BucketPriorityQueue.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BucketPriorityQueue.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/BucketPriorityQueue.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BucketPriorityQueue.java
index d6cfde0c46eae..70d0552b3e80b 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/BucketPriorityQueue.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BucketPriorityQueue.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.apache.lucene.util.PriorityQueue;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/CellIdSource.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/CellIdSource.java
similarity index 98%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/CellIdSource.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/CellIdSource.java
index 12d9043a2fd5f..d40029e9a762d 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/CellIdSource.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/CellIdSource.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/CellValues.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/CellValues.java
similarity index 97%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/CellValues.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/CellValues.java
index 9dc357659aae8..d01896c8136fa 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/CellValues.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/CellValues.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.index.fielddata.AbstractSortingNumericDocValues;
import org.opensearch.index.fielddata.MultiGeoPointValues;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGrid.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGrid.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGrid.java
index cfdb08f9ee3d7..4ae888640efc8 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGrid.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGrid.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java
similarity index 99%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java
index b08c40268c5cf..4a904b3aa2b16 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchException;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregator.java
similarity index 99%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregator.java
index 1ef8ba6c697f4..909772c61a960 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregator.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java
index 4049bf2c73640..bbaf9613fb216 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.geo.GeoUtils;
@@ -40,7 +40,7 @@
import org.opensearch.search.aggregations.AggregationBuilder;
import org.opensearch.search.aggregations.AggregatorFactories;
import org.opensearch.search.aggregations.AggregatorFactory;
-import org.opensearch.search.aggregations.metrics.GeoGridAggregatorSupplier;
+import org.opensearch.geo.search.aggregations.metrics.GeoGridAggregatorSupplier;
import org.opensearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.opensearch.search.aggregations.support.ValuesSourceConfig;
import org.opensearch.search.aggregations.support.ValuesSourceRegistry;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java
similarity index 97%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java
index 1106320c7431f..6ca7a4d8a9cb8 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.search.aggregations.Aggregator;
import org.opensearch.search.aggregations.AggregatorFactories;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java
similarity index 98%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java
index cdc801aaedffb..1914c07e831f7 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.geometry.utils.Geohash;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java
similarity index 95%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java
index f73360e3cb826..76ad515f34fe5 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.io.stream.StreamInput;
@@ -39,7 +39,8 @@
import org.opensearch.search.aggregations.AggregationBuilder;
import org.opensearch.search.aggregations.AggregatorFactories;
import org.opensearch.search.aggregations.AggregatorFactory;
-import org.opensearch.search.aggregations.metrics.GeoGridAggregatorSupplier;
+import org.opensearch.geo.search.aggregations.metrics.GeoGridAggregatorSupplier;
+import org.opensearch.search.aggregations.bucket.GeoTileUtils;
import org.opensearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.opensearch.search.aggregations.support.ValuesSourceConfig;
import org.opensearch.search.aggregations.support.ValuesSourceRegistry;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java
similarity index 97%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java
index 7a2b908148c4c..a205a9afde41e 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.search.aggregations.Aggregator;
import org.opensearch.search.aggregations.AggregatorFactories;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java
similarity index 97%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java
index ef8cd11a22498..b830988a3d410 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.index.query.QueryShardContext;
@@ -40,6 +40,7 @@
import org.opensearch.search.aggregations.CardinalityUpperBound;
import org.opensearch.search.aggregations.InternalAggregation;
import org.opensearch.search.aggregations.NonCollectingAggregator;
+import org.opensearch.search.aggregations.bucket.GeoTileUtils;
import org.opensearch.search.aggregations.support.CoreValuesSourceType;
import org.opensearch.search.aggregations.support.ValuesSource;
import org.opensearch.search.aggregations.support.ValuesSourceAggregatorFactory;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGrid.java
similarity index 99%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGrid.java
index 94a5ad5717854..9dbed7b27307a 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGrid.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
similarity index 98%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
index a187bfefb661f..93fcdbd098400 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java
similarity index 97%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java
index 7811b8774d04f..ff1247300939a 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.search.aggregations.InternalAggregations;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoHashGridBucket.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGridBucket.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoHashGridBucket.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGridBucket.java
index f9c45dc41ceb1..659909e868651 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoHashGridBucket.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGridBucket.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.io.stream.StreamInput;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoTileGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoTileGrid.java
similarity index 97%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoTileGrid.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoTileGrid.java
index efbd9a05d6a4d..fa544b5893f0c 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoTileGrid.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoTileGrid.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.search.aggregations.InternalAggregations;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoTileGridBucket.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoTileGridBucket.java
similarity index 94%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoTileGridBucket.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoTileGridBucket.java
index f200f55232e00..65d736cfceb32 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/InternalGeoTileGridBucket.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoTileGridBucket.java
@@ -30,11 +30,12 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.search.aggregations.InternalAggregations;
+import org.opensearch.search.aggregations.bucket.GeoTileUtils;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoGrid.java
similarity index 97%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoGrid.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoGrid.java
index 3f85cf350c89c..adfffeddba59d 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoGrid.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoGrid.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.CheckedFunction;
import org.opensearch.common.xcontent.ObjectParser;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java
index 08e5c15188ee6..80124cda50b19 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.search.aggregations.Aggregation;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java
index f20f972c1ce0a..109524e755c4d 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.xcontent.ObjectParser;
import org.opensearch.common.xcontent.XContentParser;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java
index 05c7a1c8d1663..4e6e454b08324 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.xcontent.XContentParser;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java
index 06915cc4210e1..8734c96a15578 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.xcontent.ObjectParser;
import org.opensearch.common.xcontent.XContentParser;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoTileGridBucket.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoTileGridBucket.java
similarity index 93%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoTileGridBucket.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoTileGridBucket.java
index c8dec16f322ef..fd47c35f13de1 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/ParsedGeoTileGridBucket.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoTileGridBucket.java
@@ -30,10 +30,11 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.xcontent.XContentParser;
+import org.opensearch.search.aggregations.bucket.GeoTileUtils;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/UnboundedCellValues.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/UnboundedCellValues.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/UnboundedCellValues.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/UnboundedCellValues.java
index f5a139cdb8d9d..c628c7bfdc8ec 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/UnboundedCellValues.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/UnboundedCellValues.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.index.fielddata.MultiGeoPointValues;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/package-info.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/package-info.java
similarity index 79%
rename from server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/package-info.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/package-info.java
index c59685e06cf79..d9183a0f742ef 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/geogrid/package-info.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/package-info.java
@@ -7,4 +7,4 @@
*/
/** geo_grid Aggregation package. */
-package org.opensearch.search.aggregations.bucket.geogrid;
+package org.opensearch.geo.search.aggregations.bucket.geogrid;
diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoBoundsAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoBoundsAggregator.java
new file mode 100644
index 0000000000000..4a39fa1da04eb
--- /dev/null
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoBoundsAggregator.java
@@ -0,0 +1,128 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.search.aggregations.metrics;
+
+import org.opensearch.common.lease.Releasables;
+import org.opensearch.common.util.BigArrays;
+import org.opensearch.common.util.DoubleArray;
+import org.opensearch.search.aggregations.Aggregator;
+import org.opensearch.search.aggregations.InternalAggregation;
+import org.opensearch.search.aggregations.metrics.MetricsAggregator;
+import org.opensearch.search.aggregations.support.ValuesSource;
+import org.opensearch.search.aggregations.support.ValuesSourceConfig;
+import org.opensearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.Map;
+
+/**
+ * Abstract base class for running the {@link GeoBounds} aggregation over fields of type geo_shape and geo_point.
+ *
+ * @param <T> the {@link ValuesSource} subtype that provides the data on which the aggregation runs.
+ * @opensearch.internal
+ */
+public abstract class AbstractGeoBoundsAggregator<T extends ValuesSource> extends MetricsAggregator {
+
+ protected final T valuesSource;
+ protected final boolean wrapLongitude;
+ protected DoubleArray tops;
+ protected DoubleArray bottoms;
+ protected DoubleArray posLefts;
+ protected DoubleArray posRights;
+ protected DoubleArray negLefts;
+ protected DoubleArray negRights;
+
+ @SuppressWarnings("unchecked")
+ protected AbstractGeoBoundsAggregator(
+ String name,
+ SearchContext searchContext,
+ Aggregator aggregator,
+ ValuesSourceConfig valuesSourceConfig,
+ boolean wrapLongitude,
+ Map<String, Object> metaData
+ ) throws IOException {
+ super(name, searchContext, aggregator, metaData);
+ this.wrapLongitude = wrapLongitude;
+ valuesSource = valuesSourceConfig.hasValues() ? (T) valuesSourceConfig.getValuesSource() : null;
+
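+ // Track per-bucket bounds in BigArrays; positive and negative longitude ranges are kept
+ // separately so that wrap_longitude can resolve the final bounding box.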
+ if (valuesSource != null) {
+ final BigArrays bigArrays = context.bigArrays();
+ tops = bigArrays.newDoubleArray(1, false);
+ tops.fill(0, tops.size(), Double.NEGATIVE_INFINITY);
+ bottoms = bigArrays.newDoubleArray(1, false);
+ bottoms.fill(0, bottoms.size(), Double.POSITIVE_INFINITY);
+ posLefts = bigArrays.newDoubleArray(1, false);
+ posLefts.fill(0, posLefts.size(), Double.POSITIVE_INFINITY);
+ posRights = bigArrays.newDoubleArray(1, false);
+ posRights.fill(0, posRights.size(), Double.NEGATIVE_INFINITY);
+ negLefts = bigArrays.newDoubleArray(1, false);
+ negLefts.fill(0, negLefts.size(), Double.POSITIVE_INFINITY);
+ negRights = bigArrays.newDoubleArray(1, false);
+ negRights.fill(0, negRights.size(), Double.NEGATIVE_INFINITY);
+ }
+ }
+
+ /**
+ * Build an empty aggregation.
+ */
+ @Override
+ public InternalAggregation buildEmptyAggregation() {
+ return new InternalGeoBounds(
+ name,
+ Double.NEGATIVE_INFINITY,
+ Double.POSITIVE_INFINITY,
+ Double.POSITIVE_INFINITY,
+ Double.NEGATIVE_INFINITY,
+ Double.POSITIVE_INFINITY,
+ Double.NEGATIVE_INFINITY,
+ wrapLongitude,
+ metadata()
+ );
+ }
+
+ /**
+ * Build an aggregation for data that has been collected into {@code owningBucketOrdinal}.
+ */
+ @Override
+ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
+ if (valuesSource == null) {
+ return buildEmptyAggregation();
+ }
+ double top = tops.get(owningBucketOrdinal);
+ double bottom = bottoms.get(owningBucketOrdinal);
+ double posLeft = posLefts.get(owningBucketOrdinal);
+ double posRight = posRights.get(owningBucketOrdinal);
+ double negLeft = negLefts.get(owningBucketOrdinal);
+ double negRight = negRights.get(owningBucketOrdinal);
+ return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, metadata());
+ }
+
+ @Override
+ public void doClose() {
+ Releasables.close(tops, bottoms, posLefts, posRights, negLefts, negRights);
+ }
+
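+ /**
+ * Grows the backing arrays to fit the given bucket ordinal, filling newly added slots with
+ * sentinel values so that the first collected point always replaces them.
+ */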
+ protected void setBucketSize(final long bucket, final BigArrays bigArrays) {
+ if (bucket >= tops.size()) {
+ long from = tops.size();
+ tops = bigArrays.grow(tops, bucket + 1);
+ tops.fill(from, tops.size(), Double.NEGATIVE_INFINITY);
+ bottoms = bigArrays.resize(bottoms, tops.size());
+ bottoms.fill(from, bottoms.size(), Double.POSITIVE_INFINITY);
+ posLefts = bigArrays.resize(posLefts, tops.size());
+ posLefts.fill(from, posLefts.size(), Double.POSITIVE_INFINITY);
+ posRights = bigArrays.resize(posRights, tops.size());
+ posRights.fill(from, posRights.size(), Double.NEGATIVE_INFINITY);
+ negLefts = bigArrays.resize(negLefts, tops.size());
+ negLefts.fill(from, negLefts.size(), Double.POSITIVE_INFINITY);
+ negRights = bigArrays.resize(negRights, tops.size());
+ negRights.fill(from, negRights.size(), Double.NEGATIVE_INFINITY);
+ }
+ }
+}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBounds.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBounds.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBounds.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBounds.java
index 380fbce85ada7..81ef502dda130 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBounds.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBounds.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.search.aggregations.Aggregation;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
similarity index 93%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
index 64e27fa7e13d1..b2c441f9a951c 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
@@ -30,8 +30,9 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
+import org.opensearch.common.ParseField;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.ObjectParser;
@@ -40,6 +41,7 @@
import org.opensearch.search.aggregations.AggregationBuilder;
import org.opensearch.search.aggregations.AggregatorFactories;
import org.opensearch.search.aggregations.AggregatorFactory;
+import org.opensearch.search.aggregations.metrics.GeoBoundsAggregatorSupplier;
import org.opensearch.search.aggregations.support.CoreValuesSourceType;
import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.opensearch.search.aggregations.support.ValuesSourceConfig;
@@ -57,6 +59,7 @@
*/
public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<GeoBoundsAggregationBuilder> {
public static final String NAME = "geo_bounds";
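+ // Moved from GeoBoundsAggregator so that the builder owns the wrap_longitude ParseField directly.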
+ private static final ParseField WRAP_LONGITUDE_FIELD = new ParseField("wrap_longitude");
public static final ValuesSourceRegistry.RegistryKey<GeoBoundsAggregatorSupplier> REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(
NAME,
GeoBoundsAggregatorSupplier.class
@@ -68,7 +71,7 @@ public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<
);
static {
ValuesSourceAggregationBuilder.declareFields(PARSER, false, false, false);
- PARSER.declareBoolean(GeoBoundsAggregationBuilder::wrapLongitude, GeoBoundsAggregator.WRAP_LONGITUDE_FIELD);
+ PARSER.declareBoolean(GeoBoundsAggregationBuilder::wrapLongitude, WRAP_LONGITUDE_FIELD);
}
public static void registerAggregators(ValuesSourceRegistry.Builder builder) {
@@ -121,13 +124,6 @@ public GeoBoundsAggregationBuilder wrapLongitude(boolean wrapLongitude) {
return this;
}
- /**
- * Get whether to wrap longitudes.
- */
- public boolean wrapLongitude() {
- return wrapLongitude;
- }
-
@Override
public BucketCardinality bucketCardinality() {
return BucketCardinality.NONE;
@@ -145,7 +141,7 @@ protected GeoBoundsAggregatorFactory innerBuild(
@Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
- builder.field(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD.getPreferredName(), wrapLongitude);
+ builder.field(WRAP_LONGITUDE_FIELD.getPreferredName(), wrapLongitude);
return builder;
}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregator.java
similarity index 51%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregator.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregator.java
index 054e8d4cb1c6c..a6518ea702be6 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregator.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregator.java
@@ -30,17 +30,13 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.apache.lucene.index.LeafReaderContext;
-import org.opensearch.common.ParseField;
import org.opensearch.common.geo.GeoPoint;
-import org.opensearch.common.lease.Releasables;
import org.opensearch.common.util.BigArrays;
-import org.opensearch.common.util.DoubleArray;
import org.opensearch.index.fielddata.MultiGeoPointValues;
import org.opensearch.search.aggregations.Aggregator;
-import org.opensearch.search.aggregations.InternalAggregation;
import org.opensearch.search.aggregations.LeafBucketCollector;
import org.opensearch.search.aggregations.LeafBucketCollectorBase;
import org.opensearch.search.aggregations.support.ValuesSource;
@@ -51,22 +47,11 @@
import java.util.Map;
/**
- * Aggregate all docs into a geographic bounds
+ * Aggregates all docs into a geographic bounds for fields of type GeoPoint.
*
* @opensearch.internal
*/
-final class GeoBoundsAggregator extends MetricsAggregator {
-
- static final ParseField WRAP_LONGITUDE_FIELD = new ParseField("wrap_longitude");
-
- private final ValuesSource.GeoPoint valuesSource;
- private final boolean wrapLongitude;
- DoubleArray tops;
- DoubleArray bottoms;
- DoubleArray posLefts;
- DoubleArray posRights;
- DoubleArray negLefts;
- DoubleArray negRights;
+final class GeoBoundsAggregator extends AbstractGeoBoundsAggregator<ValuesSource.GeoPoint> {
GeoBoundsAggregator(
String name,
@@ -76,25 +61,7 @@ final class GeoBoundsAggregator extends MetricsAggregator {
boolean wrapLongitude,
Map<String, Object> metadata
) throws IOException {
- super(name, aggregationContext, parent, metadata);
- // TODO: stop expecting nulls here
- this.valuesSource = valuesSourceConfig.hasValues() ? (ValuesSource.GeoPoint) valuesSourceConfig.getValuesSource() : null;
- this.wrapLongitude = wrapLongitude;
- if (valuesSource != null) {
- final BigArrays bigArrays = context.bigArrays();
- tops = bigArrays.newDoubleArray(1, false);
- tops.fill(0, tops.size(), Double.NEGATIVE_INFINITY);
- bottoms = bigArrays.newDoubleArray(1, false);
- bottoms.fill(0, bottoms.size(), Double.POSITIVE_INFINITY);
- posLefts = bigArrays.newDoubleArray(1, false);
- posLefts.fill(0, posLefts.size(), Double.POSITIVE_INFINITY);
- posRights = bigArrays.newDoubleArray(1, false);
- posRights.fill(0, posRights.size(), Double.NEGATIVE_INFINITY);
- negLefts = bigArrays.newDoubleArray(1, false);
- negLefts.fill(0, negLefts.size(), Double.POSITIVE_INFINITY);
- negRights = bigArrays.newDoubleArray(1, false);
- negRights.fill(0, negRights.size(), Double.NEGATIVE_INFINITY);
- }
+ super(name, aggregationContext, parent, valuesSourceConfig, wrapLongitude, metadata);
}
@Override
@@ -107,25 +74,10 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCol
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long bucket) throws IOException {
- if (bucket >= tops.size()) {
- long from = tops.size();
- tops = bigArrays.grow(tops, bucket + 1);
- tops.fill(from, tops.size(), Double.NEGATIVE_INFINITY);
- bottoms = bigArrays.resize(bottoms, tops.size());
- bottoms.fill(from, bottoms.size(), Double.POSITIVE_INFINITY);
- posLefts = bigArrays.resize(posLefts, tops.size());
- posLefts.fill(from, posLefts.size(), Double.POSITIVE_INFINITY);
- posRights = bigArrays.resize(posRights, tops.size());
- posRights.fill(from, posRights.size(), Double.NEGATIVE_INFINITY);
- negLefts = bigArrays.resize(negLefts, tops.size());
- negLefts.fill(from, negLefts.size(), Double.POSITIVE_INFINITY);
- negRights = bigArrays.resize(negRights, tops.size());
- negRights.fill(from, negRights.size(), Double.NEGATIVE_INFINITY);
- }
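+ // Growing the per-bucket arrays is now handled by AbstractGeoBoundsAggregator#setBucketSize.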
+ setBucketSize(bucket, bigArrays);
if (values.advanceExact(doc)) {
final int valuesCount = values.docValueCount();
-
for (int i = 0; i < valuesCount; ++i) {
GeoPoint value = values.nextValue();
double top = tops.get(bucket);
@@ -163,38 +115,4 @@ public void collect(int doc, long bucket) throws IOException {
}
};
}
-
- @Override
- public InternalAggregation buildAggregation(long owningBucketOrdinal) {
- if (valuesSource == null) {
- return buildEmptyAggregation();
- }
- double top = tops.get(owningBucketOrdinal);
- double bottom = bottoms.get(owningBucketOrdinal);
- double posLeft = posLefts.get(owningBucketOrdinal);
- double posRight = posRights.get(owningBucketOrdinal);
- double negLeft = negLefts.get(owningBucketOrdinal);
- double negRight = negRights.get(owningBucketOrdinal);
- return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, metadata());
- }
-
- @Override
- public InternalAggregation buildEmptyAggregation() {
- return new InternalGeoBounds(
- name,
- Double.NEGATIVE_INFINITY,
- Double.POSITIVE_INFINITY,
- Double.POSITIVE_INFINITY,
- Double.NEGATIVE_INFINITY,
- Double.POSITIVE_INFINITY,
- Double.NEGATIVE_INFINITY,
- wrapLongitude,
- metadata()
- );
- }
-
- @Override
- public void doClose() {
- Releasables.close(tops, bottoms, posLefts, posRights, negLefts, negRights);
- }
}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
similarity index 98%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
index 2c6b75842b6f5..149e052b4db7d 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.index.query.QueryShardContext;
import org.opensearch.search.aggregations.Aggregator;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoGridAggregatorSupplier.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoGridAggregatorSupplier.java
similarity index 93%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/GeoGridAggregatorSupplier.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoGridAggregatorSupplier.java
index 183c64f4e4af2..43ccb8b89545a 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoGridAggregatorSupplier.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoGridAggregatorSupplier.java
@@ -30,13 +30,13 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.common.geo.GeoBoundingBox;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregator;
import org.opensearch.search.aggregations.Aggregator;
import org.opensearch.search.aggregations.AggregatorFactories;
import org.opensearch.search.aggregations.CardinalityUpperBound;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoGridAggregator;
import org.opensearch.search.aggregations.support.ValuesSource;
import org.opensearch.search.internal.SearchContext;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalGeoBounds.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/InternalGeoBounds.java
similarity index 99%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/InternalGeoBounds.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/InternalGeoBounds.java
index 87018242ee8df..7c708de88a49c 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalGeoBounds.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/InternalGeoBounds.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.geo.GeoPoint;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/ParsedGeoBounds.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/ParsedGeoBounds.java
similarity index 98%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/ParsedGeoBounds.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/ParsedGeoBounds.java
index a482fcfdf08dd..7643ac9d9a010 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/ParsedGeoBounds.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/ParsedGeoBounds.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.common.Nullable;
import org.opensearch.common.collect.Tuple;
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoHashGridTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/GeoHashGridAggregationBuilderTests.java
similarity index 72%
rename from server/src/test/java/org/opensearch/search/aggregations/bucket/GeoHashGridTests.java
rename to modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/GeoHashGridAggregationBuilderTests.java
index 5e230a445ec98..00cb162e64c19 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoHashGridTests.java
+++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/GeoHashGridAggregationBuilderTests.java
@@ -30,14 +30,23 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket;
+package org.opensearch.geo.search.aggregations.bucket;
-import org.opensearch.common.geo.GeoBoundingBoxTests;
+import org.opensearch.geo.GeoModulePlugin;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder;
+import org.opensearch.geo.tests.common.RandomGeoGenerator;
+import org.opensearch.plugins.Plugin;
import org.opensearch.search.aggregations.BaseAggregationTestCase;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder;
-public class GeoHashGridTests extends BaseAggregationTestCase<GeoGridAggregationBuilder> {
+import java.util.Collection;
+import java.util.Collections;
+
+public class GeoHashGridAggregationBuilderTests extends BaseAggregationTestCase<GeoGridAggregationBuilder> {
+
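+ // The geo-grid aggregations now live in the geo module, so the test must load GeoModulePlugin.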
+ protected Collection<Class<? extends Plugin>> getPlugins() {
+ return Collections.singletonList(GeoModulePlugin.class);
+ }
@Override
protected GeoHashGridAggregationBuilder createTestAggregatorBuilder() {
@@ -55,7 +64,7 @@ protected GeoHashGridAggregationBuilder createTestAggregatorBuilder() {
factory.shardSize(randomIntBetween(1, Integer.MAX_VALUE));
}
if (randomBoolean()) {
- factory.setGeoBoundingBox(GeoBoundingBoxTests.randomBBox());
+ factory.setGeoBoundingBox(RandomGeoGenerator.randomBBox());
}
return factory;
}
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoTileGridTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/GeoTileGridAggregationBuilderTests.java
similarity index 70%
rename from server/src/test/java/org/opensearch/search/aggregations/bucket/GeoTileGridTests.java
rename to modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/GeoTileGridAggregationBuilderTests.java
index d54667fb4f1a6..c7c0be21273bd 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/GeoTileGridTests.java
+++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/GeoTileGridAggregationBuilderTests.java
@@ -30,15 +30,24 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.bucket;
+package org.opensearch.geo.search.aggregations.bucket;
-import org.opensearch.common.geo.GeoBoundingBoxTests;
+import org.opensearch.geo.GeoModulePlugin;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
+import org.opensearch.geo.tests.common.RandomGeoGenerator;
+import org.opensearch.plugins.Plugin;
import org.opensearch.search.aggregations.BaseAggregationTestCase;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils;
+import org.opensearch.search.aggregations.bucket.GeoTileUtils;
-public class GeoTileGridTests extends BaseAggregationTestCase<GeoGridAggregationBuilder> {
+import java.util.Collection;
+import java.util.Collections;
+
+public class GeoTileGridAggregationBuilderTests extends BaseAggregationTestCase<GeoGridAggregationBuilder> {
+
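+ // Load GeoModulePlugin so the geo-tile aggregation, now in the geo module, is registered for the test.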
+ protected Collection<Class<? extends Plugin>> getPlugins() {
+ return Collections.singletonList(GeoModulePlugin.class);
+ }
@Override
protected GeoTileGridAggregationBuilder createTestAggregatorBuilder() {
@@ -55,7 +64,7 @@ protected GeoTileGridAggregationBuilder createTestAggregatorBuilder() {
factory.shardSize(randomIntBetween(1, Integer.MAX_VALUE));
}
if (randomBoolean()) {
- factory.setGeoBoundingBox(GeoBoundingBoxTests.randomBBox());
+ factory.setGeoBoundingBox(RandomGeoGenerator.randomBBox());
}
return factory;
}
diff --git a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridAggregationCompositeAggregatorTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridAggregationCompositeAggregatorTests.java
new file mode 100644
index 0000000000000..3c7c292f9d193
--- /dev/null
+++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridAggregationCompositeAggregatorTests.java
@@ -0,0 +1,174 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.search.aggregations.bucket.composite;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LatLonPoint;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.junit.Before;
+import org.opensearch.common.geo.GeoPoint;
+import org.opensearch.geo.GeoModulePlugin;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregator;
+import org.opensearch.index.mapper.GeoPointFieldMapper;
+import org.opensearch.plugins.SearchPlugin;
+import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
+import org.opensearch.search.aggregations.bucket.GeoTileUtils;
+import org.opensearch.search.aggregations.composite.BaseCompositeAggregatorTestCase;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Tests the {@link GeoTileGridAggregator} as part of a CompositeAggregation.
+ */
+public class GeoTileGridAggregationCompositeAggregatorTests extends BaseCompositeAggregatorTestCase {
+
+ protected List<SearchPlugin> getSearchPlugins() {
+ return Collections.singletonList(new GeoModulePlugin());
+ }
+
+ @Override
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
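+ // Register a geo_point mapped field type so the geo-tile composite source can resolve it in tests.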
+ FIELD_TYPES.add(new GeoPointFieldMapper.GeoPointFieldType("geo_point"));
+ }
+
+ public void testUnmappedFieldWithGeopoint() throws Exception {
+ final List
diff --git a/server/src/test/java/org/opensearch/gateway/PrimaryShardAllocatorTests.java b/server/src/test/java/org/opensearch/gateway/PrimaryShardAllocatorTests.java
index 4a1ecb9661687..3c39ec9f03b2a 100644
--- a/server/src/test/java/org/opensearch/gateway/PrimaryShardAllocatorTests.java
+++ b/server/src/test/java/org/opensearch/gateway/PrimaryShardAllocatorTests.java
@@ -62,6 +62,7 @@
import org.opensearch.common.util.set.Sets;
import org.opensearch.env.ShardLockObtainFailedException;
import org.opensearch.index.shard.ShardId;
+import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint;
import org.opensearch.repositories.IndexId;
import org.opensearch.snapshots.Snapshot;
import org.opensearch.snapshots.SnapshotId;
@@ -205,6 +206,203 @@ public void testShardLockObtainFailedException() {
assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW);
}
+ /**
+ * Tests that the replica with the highest primary term in its replication checkpoint is selected as the target
+ */
+ public void testPreferReplicaWithHighestPrimaryTerm() {
+ String allocId1 = randomAlphaOfLength(10);
+ String allocId2 = randomAlphaOfLength(10);
+ String allocId3 = randomAlphaOfLength(10);
+ final RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(
+ yesAllocationDeciders(),
+ CLUSTER_RECOVERED,
+ allocId1,
+ allocId2,
+ allocId3
+ );
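+ // node2's checkpoint carries the highest primary term, so its replica should win.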
+ testAllocator.addData(node1, allocId1, false, new ReplicationCheckpoint(shardId, 20, 10, 101, 1));
+ testAllocator.addData(node2, allocId2, false, new ReplicationCheckpoint(shardId, 22, 10, 120, 2));
+ testAllocator.addData(node3, allocId3, false, new ReplicationCheckpoint(shardId, 20, 10, 120, 2));
+ allocateAllUnassigned(allocation);
+ assertThat(allocation.routingNodesChanged(), equalTo(true));
+ assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
+ assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(),
+ equalTo(node2.getId())
+ );
+ // Assert node2's allocation id is used
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).allocationId().getId(),
+ equalTo(allocId2)
+ );
+ assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW);
+ }
+
+ /**
+ * Tests that a replica with a null ReplicationCheckpoint is ignored and the replica with the highest replication checkpoint is selected as the target
+ */
+ public void testPreferReplicaWithNullReplicationCheckpoint() {
+ String allocId1 = randomAlphaOfLength(10);
+ String allocId2 = randomAlphaOfLength(10);
+ String allocId3 = randomAlphaOfLength(10);
+ final RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(
+ yesAllocationDeciders(),
+ CLUSTER_RECOVERED,
+ allocId1,
+ allocId2,
+ allocId3
+ );
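+ // node2 supplies no replication checkpoint, so it must not be preferred; node3 reports the highest checkpoint.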
+ testAllocator.addData(node1, allocId1, false, new ReplicationCheckpoint(shardId, 20, 10, 101, 1));
+ testAllocator.addData(node2, allocId2, false);
+ testAllocator.addData(node3, allocId3, false, new ReplicationCheckpoint(shardId, 40, 10, 120, 2));
+ allocateAllUnassigned(allocation);
+ assertThat(allocation.routingNodesChanged(), equalTo(true));
+ assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
+ assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(),
+ equalTo(node3.getId())
+ );
+ // Assert node3's allocation id is used as it has the highest replication checkpoint
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).allocationId().getId(),
+ equalTo(allocId3)
+ );
+ assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW);
+ }
+
+ /**
+ * Tests that null ReplicationCheckpoints are ignored and the previous primary is selected as the target
+ */
+ public void testPreferReplicaWithAllNullReplicationCheckpoint() {
+ String allocId1 = randomAlphaOfLength(10);
+ String allocId2 = randomAlphaOfLength(10);
+ String allocId3 = randomAlphaOfLength(10);
+ final RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(
+ yesAllocationDeciders(),
+ CLUSTER_RECOVERED,
+ allocId1,
+ allocId2,
+ allocId3
+ );
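+ // No node supplies a checkpoint; node3, flagged as the previous primary, should be chosen.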
+ testAllocator.addData(node1, allocId1, false, null, null);
+ testAllocator.addData(node2, allocId2, false, null, null);
+ testAllocator.addData(node3, allocId3, true, null, null);
+ allocateAllUnassigned(allocation);
+ assertThat(allocation.routingNodesChanged(), equalTo(true));
+ assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
+ assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(),
+ equalTo(node3.getId())
+ );
+ // Assert node3's allocation id should be used as it was previous primary
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).allocationId().getId(),
+ equalTo(allocId3)
+ );
+ assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW);
+ }
+
+ /**
+ * Tests that replica with highest segment info version will be selected as target on equal primary terms
+ */
+ public void testPreferReplicaWithHighestSegmentInfoVersion() {
+ String allocId1 = randomAlphaOfLength(10);
+ String allocId2 = randomAlphaOfLength(10);
+ String allocId3 = randomAlphaOfLength(10);
+ final RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(
+ yesAllocationDeciders(),
+ CLUSTER_RECOVERED,
+ allocId1,
+ allocId2,
+ allocId3
+ );
+ testAllocator.addData(node1, allocId1, false, new ReplicationCheckpoint(shardId, 10, 10, 101, 1));
+ testAllocator.addData(node2, allocId2, false, new ReplicationCheckpoint(shardId, 20, 10, 120, 3));
+ testAllocator.addData(node3, allocId3, false, new ReplicationCheckpoint(shardId, 20, 10, 120, 2));
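+ // node2 and node3 tie on primary term, segments generation, and seqNo; node2's higher segment infos version (3 vs 2) breaks the tie.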
+ allocateAllUnassigned(allocation);
+ assertThat(allocation.routingNodesChanged(), equalTo(true));
+ assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
+ assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(),
+ equalTo(node2.getId())
+ );
+ // Assert node2's allocation id is used
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).allocationId().getId(),
+ equalTo(allocId2)
+ );
+ assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW);
+ }
+
+ /**
+ * Tests that allocation prefers an in-sync replica at a lower checkpoint over an out-of-sync replica at a higher checkpoint
+ */
+ public void testOutOfSyncHighestRepCheckpointIsIgnored() {
+ String allocId1 = randomAlphaOfLength(10);
+ String allocId2 = randomAlphaOfLength(10);
+ String allocId3 = randomAlphaOfLength(10);
+ final RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(
+ yesAllocationDeciders(),
+ CLUSTER_RECOVERED,
+ allocId1,
+ allocId3
+ );
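+ // allocId2 is deliberately omitted from the in-sync set, so node2's higher checkpoint must be ignored.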
+ testAllocator.addData(node1, allocId1, false, new ReplicationCheckpoint(shardId, 10, 10, 101, 1));
+ testAllocator.addData(node2, allocId2, false, new ReplicationCheckpoint(shardId, 20, 10, 120, 2));
+ testAllocator.addData(node3, allocId3, false, new ReplicationCheckpoint(shardId, 15, 10, 120, 2));
+ allocateAllUnassigned(allocation);
+ assertThat(allocation.routingNodesChanged(), equalTo(true));
+ assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
+ assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(),
+ equalTo(node3.getId())
+ );
+ // Assert node3's allocation id is used
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).allocationId().getId(),
+ equalTo(allocId3)
+ );
+ assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW);
+ }
+
+ /**
+ * Tests that allocation prefers the previous primary over a replica with a higher replication checkpoint
+ */
+ public void testPreferAllocatingPreviousPrimaryWithLowerRepCheckpoint() {
+ String allocId1 = randomAlphaOfLength(10);
+ String allocId2 = randomAlphaOfLength(10);
+ String allocId3 = randomAlphaOfLength(10);
+ final RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(
+ yesAllocationDeciders(),
+ CLUSTER_RECOVERED,
+ allocId1,
+ allocId2,
+ allocId3
+ );
+ testAllocator.addData(node1, allocId1, true, new ReplicationCheckpoint(shardId, 10, 10, 101, 1));
+ testAllocator.addData(node2, allocId2, false, new ReplicationCheckpoint(shardId, 20, 10, 120, 2));
+ testAllocator.addData(node3, allocId3, false, new ReplicationCheckpoint(shardId, 15, 10, 120, 2));
+ allocateAllUnassigned(allocation);
+ assertThat(allocation.routingNodesChanged(), equalTo(true));
+ assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
+ assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(),
+ equalTo(node1.getId())
+ );
+ // Assert node1's allocation id is used as it was the previous primary, despite its lower replication checkpoint
+ assertThat(
+ allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).allocationId().getId(),
+ equalTo(allocId1)
+ );
+ assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW);
+ }
+
/**
* Tests that when one node returns a ShardLockObtainFailedException and another properly loads the store, it will
* select the second node as target
@@ -219,7 +417,7 @@ public void testShardLockObtainFailedExceptionPreferOtherValidCopies() {
allocId2
);
testAllocator.addData(node1, allocId1, randomBoolean(), new ShardLockObtainFailedException(shardId, "test"));
- testAllocator.addData(node2, allocId2, randomBoolean(), null);
+ testAllocator.addData(node2, allocId2, randomBoolean());
allocateAllUnassigned(allocation);
assertThat(allocation.routingNodesChanged(), equalTo(true));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
@@ -601,17 +799,42 @@ public TestAllocator clear() {
return this;
}
+ public TestAllocator addData(
+ DiscoveryNode node,
+ String allocationId,
+ boolean primary,
+ ReplicationCheckpoint replicationCheckpoint
+ ) {
+ return addData(node, allocationId, primary, replicationCheckpoint, null);
+ }
+
public TestAllocator addData(DiscoveryNode node, String allocationId, boolean primary) {
- return addData(node, allocationId, primary, null);
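+ // Callers that do not pass a checkpoint now default to an empty one rather than null.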
+ return addData(node, allocationId, primary, ReplicationCheckpoint.empty(shardId), null);
}
public TestAllocator addData(DiscoveryNode node, String allocationId, boolean primary, @Nullable Exception storeException) {
+ return addData(node, allocationId, primary, ReplicationCheckpoint.empty(shardId), storeException);
+ }
+
+ public TestAllocator addData(
+ DiscoveryNode node,
+ String allocationId,
+ boolean primary,
+ ReplicationCheckpoint replicationCheckpoint,
+ @Nullable Exception storeException
+ ) {
if (data == null) {
data = new HashMap<>();
}
data.put(
node,
- new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node, allocationId, primary, storeException)
+ new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(
+ node,
+ allocationId,
+ primary,
+ replicationCheckpoint,
+ storeException
+ )
);
return this;
}
diff --git a/server/src/test/java/org/opensearch/index/IndexSettingsTests.java b/server/src/test/java/org/opensearch/index/IndexSettingsTests.java
index 90aa36253fb7f..e02eac85beafb 100644
--- a/server/src/test/java/org/opensearch/index/IndexSettingsTests.java
+++ b/server/src/test/java/org/opensearch/index/IndexSettingsTests.java
@@ -42,6 +42,7 @@
import org.opensearch.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.index.translog.Translog;
+import org.opensearch.indices.replication.common.ReplicationType;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.VersionUtils;
@@ -854,4 +855,25 @@ public void testEnablingRemoteTranslogStoreFailsWhenRemoteSegmentDisabled() {
iae.getMessage()
);
}
+
+ public void testEnablingRemoteStoreFailsWhenReplicationTypeIsDocument() {
+ Settings indexSettings = Settings.builder()
+ .put("index.replication.type", ReplicationType.DOCUMENT)
+ .put("index.remote_store.enabled", true)
+ .build();
+ IllegalArgumentException iae = expectThrows(
+ IllegalArgumentException.class,
+ () -> IndexMetadata.INDEX_REMOTE_STORE_ENABLED_SETTING.get(indexSettings)
+ );
+ assertEquals("To enable index.remote_store.enabled, index.replication.type should be set to SEGMENT", iae.getMessage());
+ }
+
+ public void testEnablingRemoteStoreFailsWhenReplicationTypeIsDefault() {
+ Settings indexSettings = Settings.builder().put("index.remote_store.enabled", true).build();
+ IllegalArgumentException iae = expectThrows(
+ IllegalArgumentException.class,
+ () -> IndexMetadata.INDEX_REMOTE_STORE_ENABLED_SETTING.get(indexSettings)
+ );
+ assertEquals("To enable index.remote_store.enabled, index.replication.type should be set to SEGMENT", iae.getMessage());
+ }
}
diff --git a/server/src/test/java/org/opensearch/index/codec/CodecTests.java b/server/src/test/java/org/opensearch/index/codec/CodecTests.java
index 0275066f9af1b..0a6338333bffc 100644
--- a/server/src/test/java/org/opensearch/index/codec/CodecTests.java
+++ b/server/src/test/java/org/opensearch/index/codec/CodecTests.java
@@ -34,7 +34,7 @@
import org.apache.logging.log4j.LogManager;
import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.lucene92.Lucene92Codec;
+import org.apache.lucene.codecs.lucene94.Lucene94Codec;
import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
@@ -65,21 +65,21 @@ public class CodecTests extends OpenSearchTestCase {
public void testResolveDefaultCodecs() throws Exception {
CodecService codecService = createCodecService();
assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class));
- assertThat(codecService.codec("default"), instanceOf(Lucene92Codec.class));
+ assertThat(codecService.codec("default"), instanceOf(Lucene94Codec.class));
}
public void testDefault() throws Exception {
Codec codec = createCodecService().codec("default");
- assertStoredFieldsCompressionEquals(Lucene92Codec.Mode.BEST_SPEED, codec);
+ assertStoredFieldsCompressionEquals(Lucene94Codec.Mode.BEST_SPEED, codec);
}
public void testBestCompression() throws Exception {
Codec codec = createCodecService().codec("best_compression");
- assertStoredFieldsCompressionEquals(Lucene92Codec.Mode.BEST_COMPRESSION, codec);
+ assertStoredFieldsCompressionEquals(Lucene94Codec.Mode.BEST_COMPRESSION, codec);
}
// write some docs with it, inspect .si to see this was the used compression
- private void assertStoredFieldsCompressionEquals(Lucene92Codec.Mode expected, Codec actual) throws Exception {
+ private void assertStoredFieldsCompressionEquals(Lucene94Codec.Mode expected, Codec actual) throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(null);
iwc.setCodec(actual);
@@ -91,7 +91,7 @@ private void assertStoredFieldsCompressionEquals(Lucene92Codec.Mode expected, Co
SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader();
String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY);
assertNotNull(v);
- assertEquals(expected, Lucene92Codec.Mode.valueOf(v));
+ assertEquals(expected, Lucene94Codec.Mode.valueOf(v));
ir.close();
dir.close();
}
diff --git a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java
index 340811352a203..575997dc2609e 100644
--- a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java
@@ -32,7 +32,7 @@
package org.opensearch.index.engine;
import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene92.Lucene92Codec;
+import org.apache.lucene.codecs.lucene94.Lucene94Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
@@ -70,7 +70,7 @@ public void testExceptionsAreNotCached() {
public void testCompletionStatsCache() throws IOException, InterruptedException {
final IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
final PostingsFormat postingsFormat = new Completion90PostingsFormat();
- indexWriterConfig.setCodec(new Lucene92Codec() {
+ indexWriterConfig.setCodec(new Lucene94Codec() {
@Override
public PostingsFormat getPostingsFormatForField(String field) {
return postingsFormat; // all fields are suggest fields
diff --git a/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java b/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java
index 7ddd92ea7b36e..269d89352fb18 100644
--- a/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java
@@ -16,6 +16,7 @@
import org.opensearch.index.codec.CodecService;
import org.opensearch.index.codec.CodecServiceFactory;
import org.opensearch.index.seqno.RetentionLeases;
+import org.opensearch.index.translog.InternalTranslogFactory;
import org.opensearch.index.translog.TranslogDeletionPolicy;
import org.opensearch.index.translog.TranslogDeletionPolicyFactory;
import org.opensearch.index.translog.TranslogReader;
@@ -66,7 +67,8 @@ public void testCreateEngineConfigFromFactory() {
() -> new RetentionLeases(0, 0, Collections.emptyList()),
null,
null,
- false
+ false,
+ new InternalTranslogFactory()
);
assertNotNull(config.getCodec());
@@ -143,7 +145,8 @@ public void testCreateCodecServiceFromFactory() {
() -> new RetentionLeases(0, 0, Collections.emptyList()),
null,
null,
- false
+ false,
+ new InternalTranslogFactory()
);
assertNotNull(config.getCodec());
}
diff --git a/server/src/test/java/org/opensearch/index/engine/EngineConfigTests.java b/server/src/test/java/org/opensearch/index/engine/EngineConfigTests.java
index 1c6d06e9bcc08..1754d6082b86d 100644
--- a/server/src/test/java/org/opensearch/index/engine/EngineConfigTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/EngineConfigTests.java
@@ -13,6 +13,7 @@
import org.opensearch.common.settings.Settings;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.seqno.RetentionLeases;
+import org.opensearch.index.translog.InternalTranslogFactory;
import org.opensearch.indices.replication.common.ReplicationType;
import org.opensearch.test.IndexSettingsModule;
import org.opensearch.test.OpenSearchTestCase;
@@ -102,7 +103,8 @@ private EngineConfig createReadOnlyEngine(IndexSettings indexSettings) {
() -> RetentionLeases.EMPTY,
null,
null,
- true
+ true,
+ new InternalTranslogFactory()
);
}
}
diff --git a/server/src/test/java/org/opensearch/index/engine/NRTReplicationEngineTests.java b/server/src/test/java/org/opensearch/index/engine/NRTReplicationEngineTests.java
index 675ff860c3334..1fe1a37dedae0 100644
--- a/server/src/test/java/org/opensearch/index/engine/NRTReplicationEngineTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/NRTReplicationEngineTests.java
@@ -12,18 +12,25 @@
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SegmentInfos;
import org.hamcrest.MatcherAssert;
+import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.concurrent.GatedCloseable;
import org.opensearch.common.lucene.Lucene;
import org.opensearch.common.lucene.search.Queries;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.index.IndexSettings;
import org.opensearch.index.mapper.ParsedDocument;
+import org.opensearch.index.seqno.LocalCheckpointTracker;
import org.opensearch.index.seqno.SequenceNumbers;
import org.opensearch.index.store.Store;
import org.opensearch.index.translog.TestTranslog;
import org.opensearch.index.translog.Translog;
+import org.opensearch.indices.replication.common.ReplicationType;
+import org.opensearch.test.IndexSettingsModule;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
@@ -31,6 +38,8 @@
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.opensearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED;
+import static org.opensearch.index.seqno.SequenceNumbers.LOCAL_CHECKPOINT_KEY;
+import static org.opensearch.index.seqno.SequenceNumbers.MAX_SEQ_NO;
public class NRTReplicationEngineTests extends EngineTestCase {
@@ -210,6 +219,49 @@ public void testTrimTranslogOps() throws Exception {
}
}
+ public void testCommitSegmentInfos() throws Exception {
+ // This test asserts that NRTReplicationEngine#commitSegmentInfos creates a new commit point with the latest checkpoints
+ // stored in user data.
+ final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
+ final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
+ "index",
+ Settings.builder().put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT).build()
+ );
+ try (
+ final Store nrtEngineStore = createStore(indexSettings, newDirectory());
+ final NRTReplicationEngine nrtEngine = buildNrtReplicaEngine(globalCheckpoint, nrtEngineStore)
+ ) {
+ List<Engine.Operation> operations = generateHistoryOnReplica(between(1, 500), randomBoolean(), randomBoolean(), randomBoolean())
+ .stream()
+ .filter(op -> op.operationType().equals(Engine.Operation.TYPE.INDEX))
+ .collect(Collectors.toList());
+ for (Engine.Operation op : operations) {
+ applyOperation(nrtEngine, op);
+ }
+
+ final SegmentInfos previousInfos = nrtEngine.getLatestSegmentInfos();
+ LocalCheckpointTracker localCheckpointTracker = nrtEngine.getLocalCheckpointTracker();
+ final long maxSeqNo = localCheckpointTracker.getMaxSeqNo();
+ final long processedCheckpoint = localCheckpointTracker.getProcessedCheckpoint();
+ nrtEngine.commitSegmentInfos();
+
+ // ensure getLatestSegmentInfos returns an updated infos ref with correct userdata.
+ final SegmentInfos latestSegmentInfos = nrtEngine.getLatestSegmentInfos();
+ assertEquals(previousInfos.getGeneration(), latestSegmentInfos.getLastGeneration());
+ Map<String, String> userData = latestSegmentInfos.getUserData();
+ assertEquals(processedCheckpoint, localCheckpointTracker.getProcessedCheckpoint());
+ assertEquals(maxSeqNo, Long.parseLong(userData.get(MAX_SEQ_NO)));
+ assertEquals(processedCheckpoint, Long.parseLong(userData.get(LOCAL_CHECKPOINT_KEY)));
+
+ // read infos from store and assert userdata
+ final String lastCommitSegmentsFileName = SegmentInfos.getLastCommitSegmentsFileName(nrtEngineStore.directory());
+ final SegmentInfos committedInfos = SegmentInfos.readCommit(nrtEngineStore.directory(), lastCommitSegmentsFileName);
+ userData = committedInfos.getUserData();
+ assertEquals(processedCheckpoint, Long.parseLong(userData.get(LOCAL_CHECKPOINT_KEY)));
+ assertEquals(maxSeqNo, Long.parseLong(userData.get(MAX_SEQ_NO)));
+ }
+ }
+
private void assertMatchingSegmentsAndCheckpoints(NRTReplicationEngine nrtEngine, SegmentInfos expectedSegmentInfos)
throws IOException {
assertEquals(engine.getPersistedLocalCheckpoint(), nrtEngine.getPersistedLocalCheckpoint());
diff --git a/server/src/test/java/org/opensearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/opensearch/index/fielddata/AbstractStringFieldDataTestCase.java
index 763ee59a385a2..76496491b3ed4 100644
--- a/server/src/test/java/org/opensearch/index/fielddata/AbstractStringFieldDataTestCase.java
+++ b/server/src/test/java/org/opensearch/index/fielddata/AbstractStringFieldDataTestCase.java
@@ -52,6 +52,7 @@
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopFieldDocs;
+import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.join.QueryBitSetProducer;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
@@ -340,7 +341,13 @@ public void testSortMissing(boolean first, boolean reverse) throws IOException {
randomBoolean() ? numDocs : randomIntBetween(10, numDocs),
new Sort(sortField)
);
- assertEquals(numDocs, topDocs.totalHits.value);
+ // As of Lucene 9.0.0, totalHits may be a lower bound
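+ // (IndexSearcher counts hits exactly only up to a threshold, 1000 by default, so the assertions below only bound the value)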
+ if (topDocs.totalHits.relation == TotalHits.Relation.EQUAL_TO) {
+ assertEquals(numDocs, topDocs.totalHits.value);
+ } else {
+ assertTrue(1000 <= topDocs.totalHits.value);
+ assertTrue(numDocs >= topDocs.totalHits.value);
+ }
BytesRef previousValue = first ? null : reverse ? UnicodeUtil.BIG_TERM : new BytesRef();
for (int i = 0; i < topDocs.scoreDocs.length; ++i) {
final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value");
diff --git a/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java
index 079475d9f3554..d6c89342c9df2 100644
--- a/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java
@@ -178,6 +178,26 @@ public void testFieldsWithFilledArrayShouldThrowException() throws Exception {
}
}
+ public void testDotAsFieldName() throws Exception {
+ String mapping = Strings.toString(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject(".")
+ .field("type", "text")
+ .endObject()
+ .endObject()
+ .endObject()
+ );
+
+ try {
+ createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping));
+ fail("Expected MapperParsingException");
+ } catch (MapperParsingException e) {
+ assertThat(e.getMessage(), containsString("Invalid field name"));
+ }
+ }
+
public void testFieldPropertiesArray() throws Exception {
String mapping = Strings.toString(
XContentFactory.jsonBuilder()
diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java
index 57c2289c848ef..8c00ab97a46ea 100644
--- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java
@@ -3522,7 +3522,7 @@ public void testCheckpointRefreshListenerWithNull() throws IOException {
}
/**
- * creates a new initializing shard. The shard will will be put in its proper path under the
+ * creates a new initializing shard. The shard will be put in its proper path under the
* current node id the shard is assigned to.
* @param checkpointPublisher Segment Replication Checkpoint Publisher to publish checkpoint
*/
diff --git a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java
index 3fcf6116b11a2..23371a39871c7 100644
--- a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java
@@ -8,20 +8,108 @@
package org.opensearch.index.shard;
+import org.opensearch.action.delete.DeleteRequest;
+import org.opensearch.action.index.IndexRequest;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
+import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.IndexSettings;
+import org.opensearch.index.engine.DocIdSeqNoAndSource;
+import org.opensearch.index.engine.InternalEngine;
+import org.opensearch.index.engine.NRTReplicationEngine;
import org.opensearch.index.engine.NRTReplicationEngineFactory;
+import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.replication.OpenSearchIndexLevelReplicationTestCase;
+import org.opensearch.indices.replication.checkpoint.SegmentReplicationCheckpointPublisher;
+import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint;
import org.opensearch.indices.replication.common.ReplicationType;
+import java.io.IOException;
+import java.util.List;
+
+import static java.util.Arrays.asList;
+import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
public class SegmentReplicationIndexShardTests extends OpenSearchIndexLevelReplicationTestCase {
private static final Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
.build();
+ /**
+ * Test that latestReplicationCheckpoint returns null only for docrep enabled indices
+ */
+ public void testReplicationCheckpointNullForDocRep() throws IOException {
+ Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_REPLICATION_TYPE, "DOCUMENT").put(Settings.EMPTY).build();
+ final IndexShard indexShard = newStartedShard(false, indexSettings);
+ assertNull(indexShard.getLatestReplicationCheckpoint());
+ closeShards(indexShard);
+ }
+
+ /**
+ * Test that latestReplicationCheckpoint returns ReplicationCheckpoint for segrep enabled indices
+ */
+ public void testReplicationCheckpointNotNullForSegRep() throws IOException {
+ Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_REPLICATION_TYPE, "SEGMENT").put(Settings.EMPTY).build();
+ final IndexShard indexShard = newStartedShard(indexSettings);
+ final ReplicationCheckpoint replicationCheckpoint = indexShard.getLatestReplicationCheckpoint();
+ assertNotNull(replicationCheckpoint);
+ closeShards(indexShard);
+ }
+
+ public void testSegmentReplication_Index_Update_Delete() throws Exception {
+ String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"foo\": { \"type\": \"keyword\"} }}}";
+ try (ReplicationGroup shards = createGroup(2, settings, mappings, new NRTReplicationEngineFactory())) {
+ shards.startAll();
+ final IndexShard primaryShard = shards.getPrimary();
+
+ final int numDocs = randomIntBetween(100, 200);
+ for (int i = 0; i < numDocs; i++) {
+ shards.index(new IndexRequest(index.getName()).id(String.valueOf(i)).source("{\"foo\": \"bar\"}", XContentType.JSON));
+ }
+
+ primaryShard.refresh("Test");
+ replicateSegments(primaryShard, shards.getReplicas());
+
+ shards.assertAllEqual(numDocs);
+
+ for (int i = 0; i < numDocs; i++) {
+ // randomly update docs.
+ if (randomBoolean()) {
+ shards.index(
+ new IndexRequest(index.getName()).id(String.valueOf(i)).source("{ \"foo\" : \"baz\" }", XContentType.JSON)
+ );
+ }
+ }
+
+ primaryShard.refresh("Test");
+ replicateSegments(primaryShard, shards.getReplicas());
+ shards.assertAllEqual(numDocs);
+
+ final List<DocIdSeqNoAndSource> docs = getDocIdAndSeqNos(primaryShard);
+ for (IndexShard shard : shards.getReplicas()) {
+ assertEquals(getDocIdAndSeqNos(shard), docs);
+ }
+ for (int i = 0; i < numDocs; i++) {
+ // randomly delete.
+ if (randomBoolean()) {
+ shards.delete(new DeleteRequest(index.getName()).id(String.valueOf(i)));
+ }
+ }
+ primaryShard.refresh("Test");
+ replicateSegments(primaryShard, shards.getReplicas());
+ final List<DocIdSeqNoAndSource> docsAfterDelete = getDocIdAndSeqNos(primaryShard);
+ for (IndexShard shard : shards.getReplicas()) {
+ assertEquals(getDocIdAndSeqNos(shard), docsAfterDelete);
+ }
+ }
+ }
+
public void testIgnoreShardIdle() throws Exception {
try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) {
shards.startAll();
@@ -56,4 +144,113 @@ public void testIgnoreShardIdle() throws Exception {
replica.awaitShardSearchActive(b -> assertFalse("A new RefreshListener should not be registered", b));
}
}
+
+ /**
+ * Starts a new primary shard in PrimaryMode and verifies that the shard publishes a checkpoint after refresh.
+ */
+ public void testPublishCheckpointOnPrimaryMode() throws IOException {
+ final SegmentReplicationCheckpointPublisher mock = mock(SegmentReplicationCheckpointPublisher.class);
+ IndexShard shard = newStartedShard(true);
+ CheckpointRefreshListener refreshListener = new CheckpointRefreshListener(shard, mock);
+ refreshListener.afterRefresh(true);
+
+ // verify checkpoint is published
+ verify(mock, times(1)).publish(any());
+ closeShards(shard);
+ }
+
+ /**
+ * Starts a new primary shard in PrimaryMode and begins relocation handoff. Once the handoff completes, the shard is no longer
+ * in PrimaryMode, and we verify that it does not publish a checkpoint after refresh.
+ */
+ public void testPublishCheckpointAfterRelocationHandOff() throws IOException {
+ final SegmentReplicationCheckpointPublisher mock = mock(SegmentReplicationCheckpointPublisher.class);
+ IndexShard shard = newStartedShard(true);
+ CheckpointRefreshListener refreshListener = new CheckpointRefreshListener(shard, mock);
+ String id = shard.routingEntry().allocationId().getId();
+
+ // Starting relocation handoff
+ shard.getReplicationTracker().startRelocationHandoff(id);
+
+ // Completing relocation handoff
+ shard.getReplicationTracker().completeRelocationHandoff();
+ refreshListener.afterRefresh(true);
+
+ // verify checkpoint is not published
+ verify(mock, times(0)).publish(any());
+ closeShards(shard);
+ }
+
+ public void testNRTReplicaPromotedAsPrimary() throws Exception {
+ try (ReplicationGroup shards = createGroup(2, settings, new NRTReplicationEngineFactory())) {
+ shards.startAll();
+ IndexShard oldPrimary = shards.getPrimary();
+ final IndexShard nextPrimary = shards.getReplicas().get(0);
+ final IndexShard replica = shards.getReplicas().get(1);
+
+ // 1. Create ops that are in the index and xlog of both shards but not yet part of a commit point.
+ final int numDocs = shards.indexDocs(randomInt(10));
+
+ // refresh and copy the segments over.
+ oldPrimary.refresh("Test");
+ replicateSegments(oldPrimary, shards.getReplicas());
+
+ // at this point both shards should have numDocs persisted and searchable.
+ assertDocCounts(oldPrimary, numDocs, numDocs);
+ for (IndexShard shard : shards.getReplicas()) {
+ assertDocCounts(shard, numDocs, numDocs);
+ }
+
+ // 2. Create ops that are in the replica's xlog, not in the index.
+ // index some more into both but don't replicate. replica will have only numDocs searchable, but should have totalDocs
+ // persisted.
+ final int totalDocs = numDocs + shards.indexDocs(randomInt(10));
+
+ assertDocCounts(oldPrimary, totalDocs, totalDocs);
+ for (IndexShard shard : shards.getReplicas()) {
+ assertDocCounts(shard, totalDocs, numDocs);
+ }
+
+ // promote the replica
+ shards.syncGlobalCheckpoint();
+ assertEquals(totalDocs, nextPrimary.translogStats().estimatedNumberOfOperations());
+ shards.promoteReplicaToPrimary(nextPrimary);
+
+ // close and start the oldPrimary as a replica.
+ oldPrimary.close("demoted", false);
+ oldPrimary.store().close();
+ oldPrimary = shards.addReplicaWithExistingPath(oldPrimary.shardPath(), oldPrimary.routingEntry().currentNodeId());
+ shards.recoverReplica(oldPrimary);
+
+ assertEquals(NRTReplicationEngine.class, oldPrimary.getEngine().getClass());
+ assertEquals(InternalEngine.class, nextPrimary.getEngine().getClass());
+ assertDocCounts(nextPrimary, totalDocs, totalDocs);
+ assertEquals(0, nextPrimary.translogStats().estimatedNumberOfOperations());
+
+ // refresh and push segments to our other replica.
+ nextPrimary.refresh("test");
+ replicateSegments(nextPrimary, asList(replica));
+
+ for (IndexShard shard : shards) {
+ assertConsistentHistoryBetweenTranslogAndLucene(shard);
+ }
+ final List<DocIdSeqNoAndSource> docsAfterRecovery = getDocIdAndSeqNos(shards.getPrimary());
+ for (IndexShard shard : shards.getReplicas()) {
+ assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterRecovery));
+ }
+ }
+ }
+
+ /**
+ * Assert persisted and searchable doc counts. This method should not be used while docs are concurrently indexed because
+ * it asserts point in time seqNos are relative to the doc counts.
+ */
+ private void assertDocCounts(IndexShard indexShard, int expectedPersistedDocCount, int expectedSearchableDocCount) throws IOException {
+ assertDocCount(indexShard, expectedSearchableDocCount);
+ // assigned seqNos start at 0, so assert max & local seqNos are 1 less than our persisted doc count.
+ assertEquals(expectedPersistedDocCount - 1, indexShard.seqNoStats().getMaxSeqNo());
+ assertEquals(expectedPersistedDocCount - 1, indexShard.seqNoStats().getLocalCheckpoint());
+ // processed cp should be 1 less than our searchable doc count.
+ assertEquals(expectedSearchableDocCount - 1, indexShard.getProcessedLocalCheckpoint());
+ }
}
diff --git a/server/src/test/java/org/opensearch/index/store/RemoteDirectoryTests.java b/server/src/test/java/org/opensearch/index/store/RemoteDirectoryTests.java
index 2ded77d2cecfd..97575248b4ad3 100644
--- a/server/src/test/java/org/opensearch/index/store/RemoteDirectoryTests.java
+++ b/server/src/test/java/org/opensearch/index/store/RemoteDirectoryTests.java
@@ -15,11 +15,13 @@
import org.opensearch.common.blobstore.BlobContainer;
import org.opensearch.common.blobstore.BlobMetadata;
import org.opensearch.common.blobstore.support.PlainBlobMetadata;
+import org.opensearch.common.collect.Set;
import org.opensearch.test.OpenSearchTestCase;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.NoSuchFileException;
+import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@@ -67,6 +69,24 @@ public void testListAllException() throws IOException {
assertThrows(IOException.class, () -> remoteDirectory.listAll());
}
+ public void testListFilesByPrefix() throws IOException {
+ Map<String, BlobMetadata> fileNames = Stream.of("abc", "abd", "abe", "abf", "abg")
+ .collect(Collectors.toMap(filename -> filename, filename -> new PlainBlobMetadata(filename, 100)));
+
+ when(blobContainer.listBlobsByPrefix("ab")).thenReturn(fileNames);
+
+ Collection<String> actualFileNames = remoteDirectory.listFilesByPrefix("ab");
+ Collection<String> expectedFileNames = Set.of("abc", "abd", "abe", "abf", "abg");
+ assertEquals(expectedFileNames, actualFileNames);
+ }
+
+ public void testListFilesByPrefixException() throws IOException {
+ when(blobContainer.listBlobsByPrefix("abc")).thenThrow(new IOException("Error reading blob store"));
+
+ assertThrows(IOException.class, () -> remoteDirectory.listFilesByPrefix("abc"));
+ verify(blobContainer).listBlobsByPrefix("abc");
+ }
+
public void testDeleteFile() throws IOException {
remoteDirectory.deleteFile("segment_1");
diff --git a/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java b/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java
new file mode 100644
index 0000000000000..4eabfa74625f2
--- /dev/null
+++ b/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java
@@ -0,0 +1,339 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.index.store;
+
+import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.tests.util.LuceneTestCase;
+import org.junit.Before;
+import org.opensearch.common.collect.Set;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.io.IOException;
+import java.nio.file.NoSuchFileException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.startsWith;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+public class RemoteSegmentStoreDirectoryTests extends OpenSearchTestCase {
+ private RemoteDirectory remoteDataDirectory;
+ private RemoteDirectory remoteMetadataDirectory;
+
+ private RemoteSegmentStoreDirectory remoteSegmentStoreDirectory;
+
+ @Before
+ public void setup() throws IOException {
+ remoteDataDirectory = mock(RemoteDirectory.class);
+ remoteMetadataDirectory = mock(RemoteDirectory.class);
+
+ remoteSegmentStoreDirectory = new RemoteSegmentStoreDirectory(remoteDataDirectory, remoteMetadataDirectory);
+ }
+
+ public void testUploadedSegmentMetadataToString() {
+ RemoteSegmentStoreDirectory.UploadedSegmentMetadata metadata = new RemoteSegmentStoreDirectory.UploadedSegmentMetadata(
+ "abc",
+ "pqr",
+ "123456"
+ );
+ assertEquals("abc::pqr::123456", metadata.toString());
+ }
+
+ public void testUploadedSegmentMetadataFromString() {
+ RemoteSegmentStoreDirectory.UploadedSegmentMetadata metadata = RemoteSegmentStoreDirectory.UploadedSegmentMetadata.fromString(
+ "_0.cfe::_0.cfe__uuidxyz::4567"
+ );
+ assertEquals("_0.cfe::_0.cfe__uuidxyz::4567", metadata.toString());
+ }
+
+ public void testGetMetadataFilename() {
+ // Generation 23 is encoded as "n" because the filename stores it in radix 32
+ assertEquals(
+ RemoteSegmentStoreDirectory.MetadataFilenameUtils.METADATA_PREFIX + "__12__n__uuid1",
+ RemoteSegmentStoreDirectory.MetadataFilenameUtils.getMetadataFilename(12, 23, "uuid1")
+ );
+ }
+
+ public void testGetPrimaryTermGenerationUuid() {
+ String[] filenameTokens = "abc__12__n__uuid_xyz".split(RemoteSegmentStoreDirectory.MetadataFilenameUtils.SEPARATOR);
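+ // The generation token "n" decodes to 23 in radix 32, matching the assertion below.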
+ assertEquals(12, RemoteSegmentStoreDirectory.MetadataFilenameUtils.getPrimaryTerm(filenameTokens));
+ assertEquals(23, RemoteSegmentStoreDirectory.MetadataFilenameUtils.getGeneration(filenameTokens));
+ assertEquals("uuid_xyz", RemoteSegmentStoreDirectory.MetadataFilenameUtils.getUuid(filenameTokens));
+ }
+
+ public void testMetadataFilenameComparator() {
+ List<String> metadataFilenames = new ArrayList<>(
+ List.of(
+ "abc__10__20__uuid1",
+ "abc__12__2__uuid2",
+ "pqr__1__1__uuid0",
+ "abc__3__n__uuid3",
+ "abc__10__8__uuid8",
+ "abc__3__a__uuid4",
+ "abc__3__a__uuid5"
+ )
+ );
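+ // Expected order: filename prefix first, then primary term and generation compared numerically (radix 32, so "8" < "20"), then uuid.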
+ metadataFilenames.sort(RemoteSegmentStoreDirectory.METADATA_FILENAME_COMPARATOR);
+ assertEquals(
+ List.of(
+ "abc__3__a__uuid4",
+ "abc__3__a__uuid5",
+ "abc__3__n__uuid3",
+ "abc__10__8__uuid8",
+ "abc__10__20__uuid1",
+ "abc__12__2__uuid2",
+ "pqr__1__1__uuid0"
+ ),
+ metadataFilenames
+ );
+ }
+
+ public void testInitException() throws IOException {
+ when(remoteMetadataDirectory.listFilesByPrefix(RemoteSegmentStoreDirectory.MetadataFilenameUtils.METADATA_PREFIX)).thenThrow(
+ new IOException("Error")
+ );
+
+ assertThrows(IOException.class, () -> remoteSegmentStoreDirectory.init());
+ }
+
+ public void testInitNoMetadataFile() throws IOException {
+ when(remoteMetadataDirectory.listFilesByPrefix(RemoteSegmentStoreDirectory.MetadataFilenameUtils.METADATA_PREFIX)).thenReturn(
+ List.of()
+ );
+
+ remoteSegmentStoreDirectory.init();
+ Map<String, RemoteSegmentStoreDirectory.UploadedSegmentMetadata> actualCache = remoteSegmentStoreDirectory
+ .getSegmentsUploadedToRemoteStore();
+
+ assertEquals(Set.of(), actualCache.keySet());
+ }
+
+ private Map<String, String> getDummyMetadata(String prefix, int commitGeneration) {
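+ // Values follow the UploadedSegmentMetadata string format asserted above: originalName::uploadedName::checksum.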
+ Map<String, String> metadata = new HashMap<>();
+ metadata.put(prefix + ".cfe", prefix + ".cfe::" + prefix + ".cfe__qrt::" + randomIntBetween(1000, 5000));
+ metadata.put(prefix + ".cfs", prefix + ".cfs::" + prefix + ".cfs__zxd::" + randomIntBetween(1000, 5000));
+ metadata.put(prefix + ".si", prefix + ".si::" + prefix + ".si__yui::" + randomIntBetween(1000, 5000));
+ metadata.put(
+ "segments_" + commitGeneration,
+ "segments_" + commitGeneration + "::segments_" + commitGeneration + "__exv::" + randomIntBetween(1000, 5000)
+ );
+ return metadata;
+ }
+
+ private void populateMetadata() throws IOException {
+ List<String> metadataFiles = List.of("metadata__1__5__abc", "metadata__1__6__pqr", "metadata__2__1__zxv");
+ when(remoteMetadataDirectory.listFilesByPrefix(RemoteSegmentStoreDirectory.MetadataFilenameUtils.METADATA_PREFIX)).thenReturn(
+ metadataFiles
+ );
+
+ IndexInput indexInput = mock(IndexInput.class);
+ Map<String, String> dummyMetadata = getDummyMetadata("_0", 1);
+ when(indexInput.readMapOfStrings()).thenReturn(dummyMetadata);
+ when(remoteMetadataDirectory.openInput("metadata__2__1__zxv", IOContext.DEFAULT)).thenReturn(indexInput);
+ }
+
+ public void testInit() throws IOException {
+ populateMetadata();
+
+ when(remoteMetadataDirectory.listFilesByPrefix(RemoteSegmentStoreDirectory.MetadataFilenameUtils.METADATA_PREFIX)).thenReturn(
+ List.of("metadata__1__5__abc", "metadata__1__6__pqr", "metadata__2__1__zxv")
+ );
+
+ remoteSegmentStoreDirectory.init();
+
+ Map<String, RemoteSegmentStoreDirectory.UploadedSegmentMetadata> actualCache = remoteSegmentStoreDirectory
+ .getSegmentsUploadedToRemoteStore();
+
+ assertEquals(Set.of("_0.cfe", "_0.cfs", "_0.si", "segments_1"), actualCache.keySet());
+ }
+
+ public void testListAll() throws IOException {
+ populateMetadata();
+
+ assertEquals(Set.of("_0.cfe", "_0.cfs", "_0.si", "segments_1"), Set.of(remoteSegmentStoreDirectory.listAll()));
+ }
+
+ public void testDeleteFile() throws IOException {
+ populateMetadata();
+ remoteSegmentStoreDirectory.init();
+
+ Map<String, RemoteSegmentStoreDirectory.UploadedSegmentMetadata> uploadedSegments = remoteSegmentStoreDirectory
+ .getSegmentsUploadedToRemoteStore();
+
+ assertTrue(uploadedSegments.containsKey("_0.si"));
+ assertFalse(uploadedSegments.containsKey("_100.si"));
+
+ remoteSegmentStoreDirectory.deleteFile("_0.si");
+ remoteSegmentStoreDirectory.deleteFile("_100.si");
+
+ verify(remoteDataDirectory).deleteFile(startsWith("_0.si"));
+ verify(remoteDataDirectory, times(0)).deleteFile(startsWith("_100.si"));
+ assertFalse(uploadedSegments.containsKey("_0.si"));
+ }
+
+ public void testDeleteFileException() throws IOException {
+ populateMetadata();
+ remoteSegmentStoreDirectory.init();
+
+ doThrow(new IOException("Error")).when(remoteDataDirectory).deleteFile(any());
+ assertThrows(IOException.class, () -> remoteSegmentStoreDirectory.deleteFile("_0.si"));
+ }
+
+ public void testFileLength() throws IOException {
+ populateMetadata();
+ remoteSegmentStoreDirectory.init();
+
+ Map<String, RemoteSegmentStoreDirectory.UploadedSegmentMetadata> uploadedSegments = remoteSegmentStoreDirectory
+ .getSegmentsUploadedToRemoteStore();
+
+ assertTrue(uploadedSegments.containsKey("_0.si"));
+
+ when(remoteDataDirectory.fileLength(startsWith("_0.si"))).thenReturn(1234L);
+
+ assertEquals(1234L, remoteSegmentStoreDirectory.fileLength("_0.si"));
+ }
+
+ public void testFileLengthNoSuchFile() throws IOException {
+ populateMetadata();
+ remoteSegmentStoreDirectory.init();
+
+ Map<String, RemoteSegmentStoreDirectory.UploadedSegmentMetadata> uploadedSegments = remoteSegmentStoreDirectory
+ .getSegmentsUploadedToRemoteStore();
+
+ assertFalse(uploadedSegments.containsKey("_100.si"));
+ assertThrows(NoSuchFileException.class, () -> remoteSegmentStoreDirectory.fileLength("_100.si"));
+ }
+
+ public void testCreateOutput() throws IOException {
+ IndexOutput indexOutput = mock(IndexOutput.class);
+ when(remoteDataDirectory.createOutput(startsWith("abc"), eq(IOContext.DEFAULT))).thenReturn(indexOutput);
+
+ assertEquals(indexOutput, remoteSegmentStoreDirectory.createOutput("abc", IOContext.DEFAULT));
+ }
+
+ public void testCreateOutputException() {
+ when(remoteDataDirectory.createOutput(startsWith("abc"), eq(IOContext.DEFAULT))).thenThrow(new IOException("Error"));
+
+ assertThrows(IOException.class, () -> remoteSegmentStoreDirectory.createOutput("abc", IOContext.DEFAULT));
+ }
+
+ public void testOpenInput() throws IOException {
+ populateMetadata();
+ remoteSegmentStoreDirectory.init();
+
+ IndexInput indexInput = mock(IndexInput.class);
+ when(remoteDataDirectory.openInput(startsWith("_0.si"), eq(IOContext.DEFAULT))).thenReturn(indexInput);
+
+ assertEquals(indexInput, remoteSegmentStoreDirectory.openInput("_0.si", IOContext.DEFAULT));
+ }
+
+ public void testOpenInputNoSuchFile() {
+ assertThrows(NoSuchFileException.class, () -> remoteSegmentStoreDirectory.openInput("_0.si", IOContext.DEFAULT));
+ }
+
+ public void testOpenInputException() throws IOException {
+ populateMetadata();
+ remoteSegmentStoreDirectory.init();
+
+ when(remoteDataDirectory.openInput(startsWith("_0.si"), eq(IOContext.DEFAULT))).thenThrow(new IOException("Error"));
+
+ assertThrows(IOException.class, () -> remoteSegmentStoreDirectory.openInput("_0.si", IOContext.DEFAULT));
+ }
+
+ public void testCopyFrom() throws IOException {
+ String filename = "_100.si";
+ populateMetadata();
+ remoteSegmentStoreDirectory.init();
+
+ Directory storeDirectory = LuceneTestCase.newDirectory();
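+ // Write a real file with a codec footer so a checksum can be read from it when the upload metadata is recorded.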
+ IndexOutput indexOutput = storeDirectory.createOutput(filename, IOContext.DEFAULT);
+ indexOutput.writeString("Hello World!");
+ CodecUtil.writeFooter(indexOutput);
+ indexOutput.close();
+ storeDirectory.sync(List.of(filename));
+
+ assertFalse(remoteSegmentStoreDirectory.getSegmentsUploadedToRemoteStore().containsKey(filename));
+ remoteSegmentStoreDirectory.copyFrom(storeDirectory, filename, filename, IOContext.DEFAULT);
+ assertTrue(remoteSegmentStoreDirectory.getSegmentsUploadedToRemoteStore().containsKey(filename));
+
+ storeDirectory.close();
+ }
+
+ public void testCopyFromException() throws IOException {
+ String filename = "_100.si";
+ Directory storeDirectory = LuceneTestCase.newDirectory();
+ assertFalse(remoteSegmentStoreDirectory.getSegmentsUploadedToRemoteStore().containsKey(filename));
+ doThrow(new IOException("Error")).when(remoteDataDirectory).copyFrom(storeDirectory, filename, filename, IOContext.DEFAULT);
+
+ assertThrows(IOException.class, () -> remoteSegmentStoreDirectory.copyFrom(storeDirectory, filename, filename, IOContext.DEFAULT));
+ assertFalse(remoteSegmentStoreDirectory.getSegmentsUploadedToRemoteStore().containsKey(filename));
+
+ storeDirectory.close();
+ }
+
+ public void testContainsFile() throws IOException {
+ populateMetadata();
+ remoteSegmentStoreDirectory.init();
+
+ // This is not the ideal way to add files, but the alternative is to open up access to fields in UploadedSegmentMetadata
+ Map<String, RemoteSegmentStoreDirectory.UploadedSegmentMetadata> uploadedSegmentMetadataMap = remoteSegmentStoreDirectory
+ .getSegmentsUploadedToRemoteStore();
+ uploadedSegmentMetadataMap.put(
+ "_100.si",
+ new RemoteSegmentStoreDirectory.UploadedSegmentMetadata("_100.si", "_100.si__uuid1", "1234")
+ );
+
+ assertTrue(remoteSegmentStoreDirectory.containsFile("_100.si", "1234"));
+ assertFalse(remoteSegmentStoreDirectory.containsFile("_100.si", "2345"));
+ assertFalse(remoteSegmentStoreDirectory.containsFile("_200.si", "1234"));
+ }
+
+ public void testUploadMetadataEmpty() throws IOException {
+ Directory storeDirectory = mock(Directory.class);
+ IndexOutput indexOutput = mock(IndexOutput.class);
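+ // uploadMetadata is invoked below with primary term 12 and generation 24, which encodes to "o" in radix 32.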
+ when(storeDirectory.createOutput(startsWith("metadata__12__o"), eq(IOContext.DEFAULT))).thenReturn(indexOutput);
+
+ Collection<String> segmentFiles = List.of("s1", "s2", "s3");
+ assertThrows(NoSuchFileException.class, () -> remoteSegmentStoreDirectory.uploadMetadata(segmentFiles, storeDirectory, 12L, 24L));
+ }
+
+ public void testUploadMetadataNonEmpty() throws IOException {
+ populateMetadata();
+ remoteSegmentStoreDirectory.init();
+
+ Directory storeDirectory = mock(Directory.class);
+ IndexOutput indexOutput = mock(IndexOutput.class);
+ when(storeDirectory.createOutput(startsWith("metadata__12__o"), eq(IOContext.DEFAULT))).thenReturn(indexOutput);
+
+ Collection<String> segmentFiles = List.of("_0.si");
+ remoteSegmentStoreDirectory.uploadMetadata(segmentFiles, storeDirectory, 12L, 24L);
+
+ verify(remoteMetadataDirectory).copyFrom(
+ eq(storeDirectory),
+ startsWith("metadata__12__o"),
+ startsWith("metadata__12__o"),
+ eq(IOContext.DEFAULT)
+ );
+ String metadataString = remoteSegmentStoreDirectory.getSegmentsUploadedToRemoteStore().get("_0.si").toString();
+ verify(indexOutput).writeMapOfStrings(Map.of("_0.si", metadataString));
+ }
+}
diff --git a/server/src/test/java/org/opensearch/index/translog/InternalTranslogManagerTests.java b/server/src/test/java/org/opensearch/index/translog/InternalTranslogManagerTests.java
index 5171f0dfa1d18..234abfba66622 100644
--- a/server/src/test/java/org/opensearch/index/translog/InternalTranslogManagerTests.java
+++ b/server/src/test/java/org/opensearch/index/translog/InternalTranslogManagerTests.java
@@ -47,7 +47,8 @@ public void testRecoveryFromTranslog() throws IOException {
() -> tracker,
translogUUID,
TranslogEventListener.NOOP_TRANSLOG_EVENT_LISTENER,
- () -> {}
+ () -> {},
+ new InternalTranslogFactory()
);
final int docs = randomIntBetween(1, 100);
for (int i = 0; i < docs; i++) {
@@ -85,7 +86,8 @@ public void onBeginTranslogRecovery() {
beginTranslogRecoveryInvoked.set(true);
}
},
- () -> {}
+ () -> {},
+ new InternalTranslogFactory()
);
AtomicInteger opsRecovered = new AtomicInteger();
int opsRecoveredFromTranslog = translogManager.recoverFromTranslog((snapshot) -> {
@@ -122,7 +124,8 @@ public void testTranslogRollsGeneration() throws IOException {
() -> tracker,
translogUUID,
TranslogEventListener.NOOP_TRANSLOG_EVENT_LISTENER,
- () -> {}
+ () -> {},
+ new InternalTranslogFactory()
);
final int docs = randomIntBetween(1, 100);
for (int i = 0; i < docs; i++) {
@@ -150,7 +153,8 @@ public void testTranslogRollsGeneration() throws IOException {
() -> new LocalCheckpointTracker(NO_OPS_PERFORMED, NO_OPS_PERFORMED),
translogUUID,
TranslogEventListener.NOOP_TRANSLOG_EVENT_LISTENER,
- () -> {}
+ () -> {},
+ new InternalTranslogFactory()
);
AtomicInteger opsRecovered = new AtomicInteger();
int opsRecoveredFromTranslog = translogManager.recoverFromTranslog((snapshot) -> {
@@ -183,7 +187,8 @@ public void testTrimOperationsFromTranslog() throws IOException {
() -> tracker,
translogUUID,
TranslogEventListener.NOOP_TRANSLOG_EVENT_LISTENER,
- () -> {}
+ () -> {},
+ new InternalTranslogFactory()
);
final int docs = randomIntBetween(1, 100);
for (int i = 0; i < docs; i++) {
@@ -213,7 +218,8 @@ public void testTrimOperationsFromTranslog() throws IOException {
() -> new LocalCheckpointTracker(NO_OPS_PERFORMED, NO_OPS_PERFORMED),
translogUUID,
TranslogEventListener.NOOP_TRANSLOG_EVENT_LISTENER,
- () -> {}
+ () -> {},
+ new InternalTranslogFactory()
);
AtomicInteger opsRecovered = new AtomicInteger();
int opsRecoveredFromTranslog = translogManager.recoverFromTranslog((snapshot) -> {
@@ -260,7 +266,8 @@ public void onAfterTranslogSync() {
}
}
},
- () -> {}
+ () -> {},
+ new InternalTranslogFactory()
);
translogManagerAtomicReference.set(translogManager);
Engine.Index index = indexForDoc(doc);
diff --git a/server/src/test/java/org/opensearch/indices/replication/OngoingSegmentReplicationsTests.java b/server/src/test/java/org/opensearch/indices/replication/OngoingSegmentReplicationsTests.java
index d42e75871a45a..38c55620e1223 100644
--- a/server/src/test/java/org/opensearch/indices/replication/OngoingSegmentReplicationsTests.java
+++ b/server/src/test/java/org/opensearch/indices/replication/OngoingSegmentReplicationsTests.java
@@ -155,6 +155,9 @@ public void testCancelReplication() throws IOException {
}
public void testMultipleReplicasUseSameCheckpoint() throws IOException {
+ IndexShard secondReplica = newShard(primary.shardId(), false);
+ recoverReplica(secondReplica, primary, true);
+
OngoingSegmentReplications replications = new OngoingSegmentReplications(mockIndicesService, recoverySettings);
final CheckpointInfoRequest request = new CheckpointInfoRequest(
1L,
@@ -172,7 +175,7 @@ public void testMultipleReplicasUseSameCheckpoint() throws IOException {
final CheckpointInfoRequest secondRequest = new CheckpointInfoRequest(
1L,
- replica.routingEntry().allocationId().getId(),
+ secondReplica.routingEntry().allocationId().getId(),
replicaDiscoveryNode,
testCheckpoint
);
@@ -187,6 +190,7 @@ public void testMultipleReplicasUseSameCheckpoint() throws IOException {
assertEquals(0, copyState.refCount());
assertEquals(0, replications.size());
assertEquals(0, replications.cachedCopyStateSize());
+ closeShards(secondReplica);
}
public void testStartCopyWithoutPrepareStep() {
@@ -272,4 +276,40 @@ public void onFailure(Exception e) {
}
});
}
+
+ public void testCancelAllReplicationsForShard() throws IOException {
+ // This test covers the case where the primary has multiple ongoing replications.
+ IndexShard replica_2 = newShard(primary.shardId(), false);
+ recoverReplica(replica_2, primary, true);
+
+ OngoingSegmentReplications replications = new OngoingSegmentReplications(mockIndicesService, recoverySettings);
+ final CheckpointInfoRequest request = new CheckpointInfoRequest(
+ 1L,
+ replica.routingEntry().allocationId().getId(),
+ primaryDiscoveryNode,
+ testCheckpoint
+ );
+
+ final CopyState copyState = replications.prepareForReplication(request, mock(FileChunkWriter.class));
+ assertEquals(1, copyState.refCount());
+
+ final CheckpointInfoRequest secondRequest = new CheckpointInfoRequest(
+ 1L,
+ replica_2.routingEntry().allocationId().getId(),
+ replicaDiscoveryNode,
+ testCheckpoint
+ );
+ replications.prepareForReplication(secondRequest, mock(FileChunkWriter.class));
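+ // Both requests reference the same checkpoint, so a single CopyState is cached and ref-counted twice.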
+
+ assertEquals(2, copyState.refCount());
+ assertEquals(2, replications.size());
+ assertEquals(1, replications.cachedCopyStateSize());
+
+ // cancel the primary's ongoing replications.
+ replications.cancel(primary, "Test");
+ assertEquals(0, copyState.refCount());
+ assertEquals(0, replications.size());
+ assertEquals(0, replications.cachedCopyStateSize());
+ closeShards(replica_2);
+ }
}
diff --git a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationSourceHandlerTests.java b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationSourceHandlerTests.java
index 70061c54d0da2..2c52772649acc 100644
--- a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationSourceHandlerTests.java
+++ b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationSourceHandlerTests.java
@@ -15,7 +15,9 @@
import org.opensearch.OpenSearchException;
import org.opensearch.Version;
import org.opensearch.action.ActionListener;
+import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.node.DiscoveryNode;
+import org.opensearch.common.settings.Settings;
import org.opensearch.index.shard.IndexShard;
import org.opensearch.index.shard.IndexShardTestCase;
import org.opensearch.index.store.StoreFileMetadata;
@@ -41,7 +43,8 @@ public class SegmentReplicationSourceHandlerTests extends IndexShardTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
- primary = newStartedShard(true);
+ final Settings settings = Settings.builder().put(IndexMetadata.SETTING_REPLICATION_TYPE, "SEGMENT").put(Settings.EMPTY).build();
+ primary = newStartedShard(true, settings);
replica = newShard(primary.shardId(), false);
recoverReplica(replica, primary, true);
replicaDiscoveryNode = replica.recoveryState().getTargetNode();
@@ -63,6 +66,7 @@ public void testSendFiles() throws IOException {
chunkWriter,
threadPool,
copyState,
+ primary.routingEntry().allocationId().getId(),
5000,
1
);
@@ -100,6 +104,7 @@ public void testSendFiles_emptyRequest() throws IOException {
chunkWriter,
threadPool,
copyState,
+ primary.routingEntry().allocationId().getId(),
5000,
1
);
@@ -138,6 +143,7 @@ public void testSendFileFails() throws IOException {
chunkWriter,
threadPool,
copyState,
+ primary.routingEntry().allocationId().getId(),
5000,
1
);
@@ -175,6 +181,7 @@ public void testReplicationAlreadyRunning() throws IOException {
chunkWriter,
threadPool,
copyState,
+ primary.routingEntry().allocationId().getId(),
5000,
1
);
diff --git a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java
index 8b4bda7de50ad..d3a6d1a97dacc 100644
--- a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java
+++ b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java
@@ -13,6 +13,7 @@
import org.mockito.Mockito;
import org.opensearch.OpenSearchException;
import org.opensearch.action.ActionListener;
+import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.index.shard.IndexShard;
@@ -50,7 +51,10 @@ public class SegmentReplicationTargetServiceTests extends IndexShardTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
- final Settings settings = Settings.builder().put("node.name", SegmentReplicationTargetServiceTests.class.getSimpleName()).build();
+ final Settings settings = Settings.builder()
+ .put(IndexMetadata.SETTING_REPLICATION_TYPE, "SEGMENT")
+ .put("node.name", SegmentReplicationTargetServiceTests.class.getSimpleName())
+ .build();
final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
final RecoverySettings recoverySettings = new RecoverySettings(settings, clusterSettings);
final TransportService transportService = mock(TransportService.class);
@@ -96,8 +100,8 @@ public void onReplicationFailure(SegmentReplicationState state, OpenSearchExcept
);
final SegmentReplicationTarget spy = Mockito.spy(target);
doAnswer(invocation -> {
- // setting stage to REPLICATING so transition in markAsDone succeeds on listener completion
- target.state().setStage(SegmentReplicationState.Stage.REPLICATING);
+ // set up stage correctly so the transition in markAsDone succeeds on listener completion
+ moveTargetToFinalStage(target);
+ final ActionListener<Void> listener = invocation.getArgument(0);
listener.onResponse(null);
return null;
@@ -119,7 +123,7 @@ public void onReplicationDone(SegmentReplicationState state) {
@Override
public void onReplicationFailure(SegmentReplicationState state, OpenSearchException e, boolean sendShardFailure) {
- assertEquals(SegmentReplicationState.Stage.REPLICATING, state.getStage());
+ assertEquals(SegmentReplicationState.Stage.INIT, state.getStage());
assertEquals(expectedError, e.getCause());
assertTrue(sendShardFailure);
}
@@ -127,8 +131,6 @@ public void onReplicationFailure(SegmentReplicationState state, OpenSearchExcept
);
final SegmentReplicationTarget spy = Mockito.spy(target);
doAnswer(invocation -> {
- // setting stage to REPLICATING so transition in markAsDone succeeds on listener completion
- target.state().setStage(SegmentReplicationState.Stage.REPLICATING);
+ final ActionListener<Void> listener = invocation.getArgument(0);
listener.onFailure(expectedError);
return null;
@@ -204,6 +206,23 @@ public void testNewCheckpoint_validationPassesAndReplicationFails() throws IOExc
closeShard(indexShard, false);
}
+ /**
+ * Starts a new shard in primary mode and verifies that an incoming checkpoint is not processed while the shard remains in primary mode.
+ */
+ public void testRejectCheckpointOnShardPrimaryMode() throws IOException {
+ SegmentReplicationTargetService spy = spy(sut);
+
+ // Starting a new shard in PrimaryMode.
+ IndexShard primaryShard = newStartedShard(true);
+ IndexShard spyShard = spy(primaryShard);
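+ // Stub startReplication so the checkpoint cannot trigger any real replication work.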
+ doNothing().when(spy).startReplication(any(), any(), any());
+ spy.onNewCheckpoint(aheadCheckpoint, spyShard);
+
+ // Verify that the checkpoint is not processed because the shard is in primary mode.
+ verify(spy, times(0)).startReplication(any(), any(), any());
+ closeShards(primaryShard);
+ }
+
public void testReplicationOnDone() throws IOException {
SegmentReplicationTargetService spy = spy(sut);
IndexShard spyShard = spy(indexShard);
@@ -250,4 +269,17 @@ public void testBeforeIndexShardClosed_CancelsOngoingReplications() {
sut.beforeIndexShardClosed(indexShard.shardId(), indexShard, Settings.EMPTY);
verify(spy, times(1)).cancel(any());
}
+
+ /**
+ * Moves the {@link SegmentReplicationTarget} through its {@link SegmentReplicationState.Stage} values in order,
+ * stopping at the last stage before the terminal DONE stage so that a later markAsDone transition succeeds.
+ */
+ private void moveTargetToFinalStage(SegmentReplicationTarget target) {
+ SegmentReplicationState.Stage[] stageValues = SegmentReplicationState.Stage.values();
+ assertEquals(SegmentReplicationState.Stage.INIT, target.state().getStage());
+ // Skip the first two stages (DONE and INIT) and iterate until the last value
+ for (int i = 2; i < stageValues.length; i++) {
+ target.state().setStage(stageValues[i]);
+ }
+ }
}
diff --git a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetTests.java b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetTests.java
index a0944ee249859..11217a46b3c69 100644
--- a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetTests.java
+++ b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetTests.java
@@ -8,29 +8,52 @@
package org.opensearch.indices.replication;
-import org.apache.lucene.index.IndexFileNames;
-import org.apache.lucene.index.IndexFormatTooNewException;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.IndexFormatTooNewException;
+import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.store.ByteBuffersIndexOutput;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.analysis.MockAnalyzer;
+import org.apache.lucene.tests.util.TestUtil;
import org.apache.lucene.util.Version;
import org.junit.Assert;
import org.mockito.Mockito;
+import org.opensearch.ExceptionsHelper;
+import org.opensearch.OpenSearchException;
import org.opensearch.action.ActionListener;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
+import org.opensearch.index.IndexSettings;
import org.opensearch.index.engine.NRTReplicationEngineFactory;
import org.opensearch.index.shard.IndexShard;
import org.opensearch.index.shard.IndexShardTestCase;
+import org.opensearch.index.shard.ShardId;
import org.opensearch.index.store.Store;
import org.opensearch.index.store.StoreFileMetadata;
+import org.opensearch.index.store.StoreTests;
import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint;
import org.opensearch.indices.replication.common.ReplicationType;
+import org.opensearch.test.DummyShardLock;
+import org.opensearch.test.IndexSettingsModule;
+import java.io.FileNotFoundException;
import java.io.IOException;
+import java.nio.file.NoSuchFileException;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.Random;
+import java.util.Arrays;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
@@ -69,7 +92,12 @@ public class SegmentReplicationTargetTests extends IndexShardTestCase {
0
);
- SegmentInfos testSegmentInfos;
+ private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(
+ "index",
+ Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, org.opensearch.Version.CURRENT).build()
+ );
+
+ private SegmentInfos testSegmentInfos;
@Override
public void setUp() throws Exception {
@@ -135,6 +163,7 @@ public void getSegmentFiles(
public void onResponse(Void replicationResponse) {
try {
verify(spyIndexShard, times(1)).finalizeReplication(any(), anyLong());
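+ // Complete the target here so each test drives the state machine itself instead of relying on tearDown.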
+ segrepTarget.markAsDone();
} catch (IOException ex) {
Assert.fail();
}
@@ -142,7 +171,7 @@ public void onResponse(Void replicationResponse) {
@Override
public void onFailure(Exception e) {
- logger.error("Unexpected test error", e);
+ logger.error("Unexpected onFailure", e);
Assert.fail();
}
});
@@ -186,6 +215,7 @@ public void onResponse(Void replicationResponse) {
@Override
public void onFailure(Exception e) {
assertEquals(exception, e.getCause().getCause());
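+ // Mark the target as failed so its resources are released before tearDown closes the shards.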
+ segrepTarget.fail(new OpenSearchException(e), false);
}
});
}
@@ -228,6 +258,7 @@ public void onResponse(Void replicationResponse) {
@Override
public void onFailure(Exception e) {
assertEquals(exception, e.getCause().getCause());
+ segrepTarget.fail(new OpenSearchException(e), false);
}
});
}
@@ -272,6 +303,7 @@ public void onResponse(Void replicationResponse) {
@Override
public void onFailure(Exception e) {
assertEquals(exception, e.getCause());
+ segrepTarget.fail(new OpenSearchException(e), false);
}
});
}
@@ -316,6 +348,7 @@ public void onResponse(Void replicationResponse) {
@Override
public void onFailure(Exception e) {
assertEquals(exception, e.getCause());
+ segrepTarget.fail(new OpenSearchException(e), false);
}
});
}
@@ -357,14 +390,123 @@ public void onResponse(Void replicationResponse) {
@Override
public void onFailure(Exception e) {
assert (e instanceof IllegalStateException);
+ segrepTarget.fail(new OpenSearchException(e), false);
+ }
+ });
+ }
+
+ /**
+ * Ensures that files newly generated on the primary (for example .liv files produced by deletes) are not treated as missing on the replica.
+ * @throws IOException on failure to build the test metadata snapshots
+ */
+ public void test_MissingFiles_NotCausingFailure() throws IOException {
+ int docCount = 1 + random().nextInt(10);
+ // Generate two MetadataSnapshots: the second contains the extra files produced by delete operations.
+ // The second snapshot mocks the primary shard's state; the first mocks the replica's existing state.
+ List<Store.MetadataSnapshot> storeMetadataSnapshots = generateStoreMetadataSnapshot(docCount);
+
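+ // Fake replication source: serves the primary's post-delete snapshot and echoes back any file request.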
+ SegmentReplicationSource segrepSource = new SegmentReplicationSource() {
+ @Override
+ public void getCheckpointMetadata(
+ long replicationId,
+ ReplicationCheckpoint checkpoint,
+ ActionListener<CheckpointInfoResponse> listener
+ ) {
+ listener.onResponse(
+ new CheckpointInfoResponse(checkpoint, storeMetadataSnapshots.get(1), buffer.toArrayCopy(), Set.of(PENDING_DELETE_FILE))
+ );
+ }
+
+ @Override
+ public void getSegmentFiles(
+ long replicationId,
+ ReplicationCheckpoint checkpoint,
+ List<StoreFileMetadata> filesToFetch,
+ Store store,
+ ActionListener<GetSegmentFilesResponse> listener
+ ) {
+ listener.onResponse(new GetSegmentFilesResponse(filesToFetch));
+ }
+ };
+ SegmentReplicationTargetService.SegmentReplicationListener segRepListener = mock(
+ SegmentReplicationTargetService.SegmentReplicationListener.class
+ );
+
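+ // Stub the replica's local snapshot with the pre-delete metadata so the primary's extra files look new.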
+ segrepTarget = spy(new SegmentReplicationTarget(repCheckpoint, indexShard, segrepSource, segRepListener));
+ when(segrepTarget.getMetadataSnapshot()).thenReturn(storeMetadataSnapshots.get(0));
+ segrepTarget.startReplication(new ActionListener<Void>() {
+ @Override
+ public void onResponse(Void replicationResponse) {
+ logger.info("No error processing checkpoint info");
+ segrepTarget.markAsDone();
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ logger.error("Unexpected onFailure", e);
+ Assert.fail();
}
});
}
+ /**
+ * Generates two Store.MetadataSnapshots from the same store: the second is taken after a delete and so
+ * contains extra files (e.g. a .liv file). Using a single store guarantees identical files share checksums.
+ * @param docCount number of documents to index before the first snapshot
+ * @return a two-element list: the snapshot before deletes, then the snapshot after deletes
+ * @throws IOException if indexing or snapshotting fails
+ */
+ private List<Store.MetadataSnapshot> generateStoreMetadataSnapshot(int docCount) throws IOException {
+ List<Document> docList = new ArrayList<>();
+ for (int i = 0; i < docCount; i++) {
+ Document document = new Document();
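+ // Two-character strings ("aa", "bb", ...) keep the indexed terms distinct per document.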
+ String text = new String(new char[] { (char) (97 + i), (char) (97 + i) });
+ document.add(new StringField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
+ document.add(new TextField("str", text, Field.Store.YES));
+ docList.add(document);
+ }
+ long seed = random().nextLong();
+ Random random = new Random(seed);
+ IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random)).setCodec(TestUtil.getDefaultCodec());
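+ // No merging: both commits must share identical segment files so their checksums line up across snapshots.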
+ iwc.setMergePolicy(NoMergePolicy.INSTANCE);
+ iwc.setUseCompoundFile(true);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
+ Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId));
+ IndexWriter writer = new IndexWriter(store.directory(), iwc);
+ for (Document d : docList) {
+ writer.addDocument(d);
+ }
+ writer.commit();
+ Store.MetadataSnapshot storeMetadata = store.getMetadata();
+ // Delete one document to generate .liv file
+ writer.deleteDocuments(new Term("id", Integer.toString(random().nextInt(docCount))));
+ writer.commit();
+ Store.MetadataSnapshot storeMetadataWithDeletes = store.getMetadata();
+ deleteContent(store.directory());
+ writer.close();
+ store.close();
+ return Arrays.asList(storeMetadata, storeMetadataWithDeletes);
+ }
+
+ private static void deleteContent(Directory directory) throws IOException {
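+ // Best-effort cleanup: already-missing files are ignored; other IO failures are collected and rethrown.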
+ final String[] files = directory.listAll();
+ final List<IOException> exceptions = new ArrayList<>();
+ for (String file : files) {
+ try {
+ directory.deleteFile(file);
+ } catch (NoSuchFileException | FileNotFoundException e) {
+ // ignore
+ } catch (IOException e) {
+ exceptions.add(e);
+ }
+ }
+ ExceptionsHelper.rethrowAndSuppress(exceptions);
+ }
+
@Override
public void tearDown() throws Exception {
super.tearDown();
- segrepTarget.markAsDone();
closeShards(spyIndexShard, indexShard);
}
}
diff --git a/server/src/test/java/org/opensearch/search/CreatePitSingleNodeTests.java b/server/src/test/java/org/opensearch/search/CreatePitSingleNodeTests.java
index 1e1720317b7c2..811136d4af973 100644
--- a/server/src/test/java/org/opensearch/search/CreatePitSingleNodeTests.java
+++ b/server/src/test/java/org/opensearch/search/CreatePitSingleNodeTests.java
@@ -14,6 +14,10 @@
import org.opensearch.action.search.CreatePitController;
import org.opensearch.action.search.CreatePitRequest;
import org.opensearch.action.search.CreatePitResponse;
+import org.opensearch.action.search.DeletePitAction;
+import org.opensearch.action.search.DeletePitInfo;
+import org.opensearch.action.search.DeletePitRequest;
+import org.opensearch.action.search.DeletePitResponse;
import org.opensearch.action.search.PitTestsUtil;
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchResponse;
@@ -33,6 +37,8 @@
import java.util.concurrent.ExecutionException;
import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.Matchers.blankOrNullString;
+import static org.hamcrest.Matchers.not;
import static org.opensearch.action.search.PitTestsUtil.assertSegments;
import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
@@ -277,6 +283,24 @@ public void testMaxOpenPitContexts() throws Exception {
)
);
final int maxPitContexts = SearchService.MAX_OPEN_PIT_CONTEXT.get(Settings.EMPTY);
+ DeletePitRequest deletePITRequest = new DeletePitRequest("_all");
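+ // "_all" targets every PIT context currently open on the node.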
+
+ /*
+ * Invoking delete succeeds after clearing the remaining readers. "Reader context not found"
+ * exceptions must not surface as failures, since deletion succeeded on at least one node.
+ */
+ ActionFuture<DeletePitResponse> execute1 = client().execute(DeletePitAction.INSTANCE, deletePITRequest);
+ DeletePitResponse deletePITResponse = execute1.get();
+ for (DeletePitInfo deletePitInfo : deletePITResponse.getDeletePitResults()) {
+ assertThat(deletePitInfo.getPitId(), not(blankOrNullString()));
+ assertTrue(deletePitInfo.isSuccessful());
+ }
+ assertEquals(0, service.getActiveContexts());
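+ // With all contexts cleared, a new PIT can be created up to the limit again.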
+ execute = client().execute(CreatePitAction.INSTANCE, request);
+ CreatePitResponse pitResponse = execute.get();
+ assertEquals(1, service.getActiveContexts());
+
+ validatePitStats("index", 0, maxPitContexts, 0);
validatePitStats("index", maxPitContexts, 0, 0);
service.doClose();
validatePitStats("index", 0, maxPitContexts, 0);
diff --git a/server/src/test/java/org/opensearch/search/DocValueFormatTests.java b/server/src/test/java/org/opensearch/search/DocValueFormatTests.java
index 36a6eb3ae87b0..bd0fbfe69960c 100644
--- a/server/src/test/java/org/opensearch/search/DocValueFormatTests.java
+++ b/server/src/test/java/org/opensearch/search/DocValueFormatTests.java
@@ -48,7 +48,7 @@
import java.util.ArrayList;
import java.util.List;
-import static org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils.longEncode;
+import static org.opensearch.search.aggregations.bucket.GeoTileUtils.longEncode;
public class DocValueFormatTests extends OpenSearchTestCase {
diff --git a/server/src/test/java/org/opensearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/opensearch/search/aggregations/AggregationsTests.java
index 421865013a28c..94fb6cded637d 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/AggregationsTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/AggregationsTests.java
@@ -48,8 +48,6 @@
import org.opensearch.search.aggregations.bucket.composite.InternalCompositeTests;
import org.opensearch.search.aggregations.bucket.filter.InternalFilterTests;
import org.opensearch.search.aggregations.bucket.filter.InternalFiltersTests;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoHashGridTests;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileGridTests;
import org.opensearch.search.aggregations.bucket.global.InternalGlobalTests;
import org.opensearch.search.aggregations.bucket.histogram.InternalAutoDateHistogramTests;
import org.opensearch.search.aggregations.bucket.histogram.InternalDateHistogramTests;
@@ -80,7 +78,6 @@
import org.opensearch.search.aggregations.metrics.InternalSumTests;
import org.opensearch.search.aggregations.metrics.InternalAvgTests;
import org.opensearch.search.aggregations.metrics.InternalCardinalityTests;
-import org.opensearch.search.aggregations.metrics.InternalGeoBoundsTests;
import org.opensearch.search.aggregations.metrics.InternalGeoCentroidTests;
import org.opensearch.search.aggregations.metrics.InternalHDRPercentilesRanksTests;
import org.opensearch.search.aggregations.metrics.InternalHDRPercentilesTests;
@@ -142,7 +139,6 @@ private static List<InternalAggregationTestCase<?>> getAggsTests() {
aggsTests.add(new InternalStatsBucketTests());
aggsTests.add(new InternalExtendedStatsTests());
aggsTests.add(new InternalExtendedStatsBucketTests());
- aggsTests.add(new InternalGeoBoundsTests());
aggsTests.add(new InternalGeoCentroidTests());
aggsTests.add(new InternalHistogramTests());
aggsTests.add(new InternalDateHistogramTests());
@@ -159,8 +155,6 @@ private static List> getAggsTests() {
aggsTests.add(new InternalGlobalTests());
aggsTests.add(new InternalFilterTests());
aggsTests.add(new InternalSamplerTests());
- aggsTests.add(new GeoHashGridTests());
- aggsTests.add(new GeoTileGridTests());
aggsTests.add(new InternalRangeTests());
aggsTests.add(new InternalDateRangeTests());
aggsTests.add(new InternalGeoDistanceTests());
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java
index c4a87f3993bb4..9290183ec7312 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java
@@ -32,10 +32,8 @@
package org.opensearch.search.aggregations.bucket.composite;
-import org.opensearch.common.geo.GeoBoundingBoxTests;
import org.opensearch.script.Script;
import org.opensearch.search.aggregations.BaseAggregationTestCase;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils;
import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.opensearch.search.aggregations.bucket.missing.MissingOrder;
import org.opensearch.search.sort.SortOrder;
@@ -74,17 +72,6 @@ private DateHistogramValuesSourceBuilder randomDateHistogramSourceBuilder() {
return histo;
}
- private GeoTileGridValuesSourceBuilder randomGeoTileGridValuesSourceBuilder() {
- GeoTileGridValuesSourceBuilder geoTile = new GeoTileGridValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10));
- if (randomBoolean()) {
- geoTile.precision(randomIntBetween(0, GeoTileUtils.MAX_ZOOM));
- }
- if (randomBoolean()) {
- geoTile.geoBoundingBox(GeoBoundingBoxTests.randomBBox());
- }
- return geoTile;
- }
-
private TermsValuesSourceBuilder randomTermsSourceBuilder() {
TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10));
if (randomBoolean()) {
@@ -118,11 +105,9 @@ private HistogramValuesSourceBuilder randomHistogramSourceBuilder() {
@Override
protected CompositeAggregationBuilder createTestAggregatorBuilder() {
int numSources = randomIntBetween(1, 10);
- numSources = 1;
List<CompositeValuesSourceBuilder<?>> sources = new ArrayList<>();
for (int i = 0; i < numSources; i++) {
- int type = randomIntBetween(0, 3);
- type = 3;
+ int type = randomIntBetween(0, 2);
switch (type) {
case 0:
sources.add(randomTermsSourceBuilder());
@@ -133,9 +118,6 @@ protected CompositeAggregationBuilder createTestAggregatorBuilder() {
case 2:
sources.add(randomHistogramSourceBuilder());
break;
- case 3:
- sources.add(randomGeoTileGridValuesSourceBuilder());
- break;
default:
throw new AssertionError("wrong branch");
}
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java
index 88b2323b8adfc..25003e0b84567 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java
@@ -32,68 +32,24 @@
package org.opensearch.search.aggregations.bucket.composite;
-import org.apache.lucene.tests.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.DoublePoint;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.InetAddressPoint;
-import org.apache.lucene.document.IntPoint;
-import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.document.LongPoint;
-import org.apache.lucene.document.SortedNumericDocValuesField;
-import org.apache.lucene.document.SortedSetDocValuesField;
-import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
-import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.SortedNumericSortField;
-import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.NumericUtils;
-import org.apache.lucene.tests.util.TestUtil;
import org.opensearch.OpenSearchParseException;
-import org.opensearch.common.geo.GeoPoint;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.common.text.Text;
-import org.opensearch.common.time.DateFormatter;
-import org.opensearch.common.time.DateFormatters;
-import org.opensearch.index.Index;
-import org.opensearch.index.IndexSettings;
-import org.opensearch.index.mapper.DateFieldMapper;
-import org.opensearch.index.mapper.DocumentMapper;
-import org.opensearch.index.mapper.GeoPointFieldMapper;
-import org.opensearch.index.mapper.IpFieldMapper;
-import org.opensearch.index.mapper.KeywordFieldMapper;
-import org.opensearch.index.mapper.MappedFieldType;
-import org.opensearch.index.mapper.MapperService;
-import org.opensearch.index.mapper.NumberFieldMapper;
import org.opensearch.search.aggregations.Aggregator;
-import org.opensearch.search.aggregations.AggregatorTestCase;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils;
import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.opensearch.search.aggregations.bucket.missing.MissingOrder;
import org.opensearch.search.aggregations.bucket.terms.StringTerms;
import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
+import org.opensearch.search.aggregations.composite.BaseCompositeAggregatorTestCase;
import org.opensearch.search.aggregations.metrics.InternalMax;
import org.opensearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.opensearch.search.aggregations.metrics.TopHits;
import org.opensearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.opensearch.search.aggregations.support.ValueType;
import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.IndexSettingsModule;
-import org.junit.After;
-import org.junit.Before;
import java.io.IOException;
import java.net.InetAddress;
@@ -109,51 +65,14 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
-import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
-import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-public class CompositeAggregatorTests extends AggregatorTestCase {
- private static MappedFieldType[] FIELD_TYPES;
-
- @Override
- @Before
- public void setUp() throws Exception {
- super.setUp();
- FIELD_TYPES = new MappedFieldType[8];
- FIELD_TYPES[0] = new KeywordFieldMapper.KeywordFieldType("keyword");
- FIELD_TYPES[1] = new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG);
- FIELD_TYPES[2] = new NumberFieldMapper.NumberFieldType("double", NumberFieldMapper.NumberType.DOUBLE);
- FIELD_TYPES[3] = new DateFieldMapper.DateFieldType("date", DateFormatter.forPattern("yyyy-MM-dd||epoch_millis"));
- FIELD_TYPES[4] = new NumberFieldMapper.NumberFieldType("price", NumberFieldMapper.NumberType.INTEGER);
- FIELD_TYPES[5] = new KeywordFieldMapper.KeywordFieldType("terms");
- FIELD_TYPES[6] = new IpFieldMapper.IpFieldType("ip");
- FIELD_TYPES[7] = new GeoPointFieldMapper.GeoPointFieldType("geo_point");
- }
-
- @Override
- @After
- public void tearDown() throws Exception {
- super.tearDown();
- FIELD_TYPES = null;
- }
- @Override
- protected MapperService mapperServiceMock() {
- MapperService mapperService = mock(MapperService.class);
- DocumentMapper mapper = mock(DocumentMapper.class);
- when(mapper.typeText()).thenReturn(new Text("_doc"));
- when(mapper.type()).thenReturn("_doc");
- when(mapperService.documentMapper()).thenReturn(mapper);
- return mapperService;
- }
+public class CompositeAggregatorTests extends BaseCompositeAggregatorTestCase {
public void testUnmappedFieldWithTerms() throws Exception {
final List<Map<String, List<Object>>> dataset = new ArrayList<>();
@@ -234,80 +153,6 @@ public void testUnmappedFieldWithTerms() throws Exception {
);
}
- public void testUnmappedFieldWithGeopoint() throws Exception {
- final List<Map<String, List<Object>>> dataset = new ArrayList<>();
- final String mappedFieldName = "geo_point";
- dataset.addAll(
- Arrays.asList(
- createDocument(mappedFieldName, new GeoPoint(48.934059, 41.610741)),
- createDocument(mappedFieldName, new GeoPoint(-23.065941, 113.610741)),
- createDocument(mappedFieldName, new GeoPoint(90.0, 0.0)),
- createDocument(mappedFieldName, new GeoPoint(37.2343, -115.8067)),
- createDocument(mappedFieldName, new GeoPoint(90.0, 0.0))
- )
- );
-
- // just unmapped = no results
- testSearchCase(
- Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)),
- dataset,
- () -> new CompositeAggregationBuilder("name", Arrays.asList(new GeoTileGridValuesSourceBuilder("unmapped").field("unmapped"))),
- (result) -> assertEquals(0, result.getBuckets().size())
- );
-
- // unmapped missing bucket = one result
- testSearchCase(
- Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)),
- dataset,
- () -> new CompositeAggregationBuilder(
- "name",
- Arrays.asList(new GeoTileGridValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true))
- ),
- (result) -> {
- assertEquals(1, result.getBuckets().size());
- assertEquals("{unmapped=null}", result.afterKey().toString());
- assertEquals("{unmapped=null}", result.getBuckets().get(0).getKeyAsString());
- assertEquals(5L, result.getBuckets().get(0).getDocCount());
- }
- );
-
- // field + unmapped, no missing bucket = no results
- testSearchCase(
- Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)),
- dataset,
- () -> new CompositeAggregationBuilder(
- "name",
- Arrays.asList(
- new GeoTileGridValuesSourceBuilder(mappedFieldName).field(mappedFieldName),
- new GeoTileGridValuesSourceBuilder("unmapped").field("unmapped")
- )
- ),
- (result) -> assertEquals(0, result.getBuckets().size())
- );
-
- // field + unmapped with missing bucket = multiple results
- testSearchCase(
- Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)),
- dataset,
- () -> new CompositeAggregationBuilder(
- "name",
- Arrays.asList(
- new GeoTileGridValuesSourceBuilder(mappedFieldName).field(mappedFieldName),
- new GeoTileGridValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true)
- )
- ),
- (result) -> {
- assertEquals(2, result.getBuckets().size());
- assertEquals("{geo_point=7/64/56, unmapped=null}", result.afterKey().toString());
- assertEquals("{geo_point=7/32/56, unmapped=null}", result.getBuckets().get(0).getKeyAsString());
- assertEquals(2L, result.getBuckets().get(0).getDocCount());
- assertEquals("{geo_point=7/64/56, unmapped=null}", result.getBuckets().get(1).getKeyAsString());
- assertEquals(3L, result.getBuckets().get(1).getDocCount());
- }
- );
-
- }
-
public void testUnmappedFieldWithHistogram() throws Exception {
final List<Map<String, List<Object>>> dataset = new ArrayList<>();
final String mappedFieldName = "price";
@@ -2483,42 +2328,6 @@ public void testWithIP() throws Exception {
});
}
- public void testWithGeoPoint() throws Exception {
- final List<Map<String, List<Object>>> dataset = new ArrayList<>();
- dataset.addAll(
- Arrays.asList(
- createDocument("geo_point", new GeoPoint(48.934059, 41.610741)),
- createDocument("geo_point", new GeoPoint(-23.065941, 113.610741)),
- createDocument("geo_point", new GeoPoint(90.0, 0.0)),
- createDocument("geo_point", new GeoPoint(37.2343, -115.8067)),
- createDocument("geo_point", new GeoPoint(90.0, 0.0))
- )
- );
- testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("geo_point")), dataset, () -> {
- GeoTileGridValuesSourceBuilder geoTile = new GeoTileGridValuesSourceBuilder("geo_point").field("geo_point");
- return new CompositeAggregationBuilder("name", Collections.singletonList(geoTile));
- }, (result) -> {
- assertEquals(2, result.getBuckets().size());
- assertEquals("{geo_point=7/64/56}", result.afterKey().toString());
- assertEquals("{geo_point=7/32/56}", result.getBuckets().get(0).getKeyAsString());
- assertEquals(2L, result.getBuckets().get(0).getDocCount());
- assertEquals("{geo_point=7/64/56}", result.getBuckets().get(1).getKeyAsString());
- assertEquals(3L, result.getBuckets().get(1).getDocCount());
- });
-
- testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("geo_point")), dataset, () -> {
- GeoTileGridValuesSourceBuilder geoTile = new GeoTileGridValuesSourceBuilder("geo_point").field("geo_point");
- return new CompositeAggregationBuilder("name", Collections.singletonList(geoTile)).aggregateAfter(
- Collections.singletonMap("geo_point", "7/32/56")
- );
- }, (result) -> {
- assertEquals(1, result.getBuckets().size());
- assertEquals("{geo_point=7/64/56}", result.afterKey().toString());
- assertEquals("{geo_point=7/64/56}", result.getBuckets().get(0).getKeyAsString());
- assertEquals(3L, result.getBuckets().get(0).getDocCount());
- });
- }
-
public void testEarlyTermination() throws Exception {
final List<Map<String, List<Object>>> dataset = new ArrayList<>();
dataset.addAll(
@@ -2648,193 +2457,4 @@ public void testIndexSortWithDuplicate() throws Exception {
);
}
}
-
- private void testSearchCase(
- List<Query> queries,
- List<Map<String, List<Object>>> dataset,
- Supplier<CompositeAggregationBuilder> create,
- Consumer<InternalComposite> verify
- ) throws IOException {
- for (Query query : queries) {
- executeTestCase(false, false, query, dataset, create, verify);
- executeTestCase(false, true, query, dataset, create, verify);
- }
- }
-
- private void executeTestCase(
- boolean forceMerge,
- boolean useIndexSort,
- Query query,
- List<Map<String, List<Object>>> dataset,
- Supplier<CompositeAggregationBuilder> create,
- Consumer<InternalComposite> verify
- ) throws IOException {
- Map<String, MappedFieldType> types = Arrays.stream(FIELD_TYPES)
- .collect(Collectors.toMap(MappedFieldType::name, Function.identity()));
- CompositeAggregationBuilder aggregationBuilder = create.get();
- Sort indexSort = useIndexSort ? buildIndexSort(aggregationBuilder.sources(), types) : null;
- IndexSettings indexSettings = createIndexSettings(indexSort);
- try (Directory directory = newDirectory()) {
- IndexWriterConfig config = newIndexWriterConfig(random(), new MockAnalyzer(random()));
- if (indexSort != null) {
- config.setIndexSort(indexSort);
- config.setCodec(TestUtil.getDefaultCodec());
- }
- try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, config)) {
- Document document = new Document();
- int id = 0;
- for (Map<String, List<Object>> fields : dataset) {
- document.clear();
- addToDocument(id, document, fields);
- indexWriter.addDocument(document);
- id++;
- }
- if (forceMerge || rarely()) {
- // forceMerge randomly or if the collector-per-leaf testing stuff would break the tests.
- indexWriter.forceMerge(1);
- } else {
- if (dataset.size() > 0) {
- int numDeletes = randomIntBetween(1, 25);
- for (int i = 0; i < numDeletes; i++) {
- id = randomIntBetween(0, dataset.size() - 1);
- indexWriter.deleteDocuments(new Term("id", Integer.toString(id)));
- document.clear();
- addToDocument(id, document, dataset.get(id));
- indexWriter.addDocument(document);
- }
- }
-
- }
- }
- try (IndexReader indexReader = DirectoryReader.open(directory)) {
- IndexSearcher indexSearcher = new IndexSearcher(indexReader);
- InternalComposite composite = searchAndReduce(indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES);
- verify.accept(composite);
- }
- }
- }
-
- private static IndexSettings createIndexSettings(Sort sort) {
- Settings.Builder builder = Settings.builder();
- if (sort != null) {
- String[] fields = Arrays.stream(sort.getSort()).map(SortField::getField).toArray(String[]::new);
- String[] orders = Arrays.stream(sort.getSort()).map((o) -> o.getReverse() ? "desc" : "asc").toArray(String[]::new);
- builder.putList("index.sort.field", fields);
- builder.putList("index.sort.order", orders);
- }
- return IndexSettingsModule.newIndexSettings(new Index("_index", "0"), builder.build());
- }
-
- private void addToDocument(int id, Document doc, Map<String, List<Object>> keys) {
- doc.add(new StringField("id", Integer.toString(id), Field.Store.NO));
- for (Map.Entry<String, List<Object>> entry : keys.entrySet()) {
- final String name = entry.getKey();
- for (Object value : entry.getValue()) {
- if (value instanceof Integer) {
- doc.add(new SortedNumericDocValuesField(name, (int) value));
- doc.add(new IntPoint(name, (int) value));
- } else if (value instanceof Long) {
- doc.add(new SortedNumericDocValuesField(name, (long) value));
- doc.add(new LongPoint(name, (long) value));
- } else if (value instanceof Double) {
- doc.add(new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong((double) value)));
- doc.add(new DoublePoint(name, (double) value));
- } else if (value instanceof String) {
- doc.add(new SortedSetDocValuesField(name, new BytesRef((String) value)));
- doc.add(new StringField(name, new BytesRef((String) value), Field.Store.NO));
- } else if (value instanceof InetAddress) {
- doc.add(new SortedSetDocValuesField(name, new BytesRef(InetAddressPoint.encode((InetAddress) value))));
- doc.add(new InetAddressPoint(name, (InetAddress) value));
- } else if (value instanceof GeoPoint) {
- GeoPoint point = (GeoPoint) value;
- doc.add(
- new SortedNumericDocValuesField(
- name,
- GeoTileUtils.longEncode(point.lon(), point.lat(), GeoTileGridAggregationBuilder.DEFAULT_PRECISION)
- )
- );
- doc.add(new LatLonPoint(name, point.lat(), point.lon()));
- } else {
- throw new AssertionError("invalid object: " + value.getClass().getSimpleName());
- }
- }
- }
- }
-
- private static Map<String, Object> createAfterKey(Object... fields) {
- assert fields.length % 2 == 0;
- final Map<String, Object> map = new HashMap<>();
- for (int i = 0; i < fields.length; i += 2) {
- String field = (String) fields[i];
- map.put(field, fields[i + 1]);
- }
- return map;
- }
-
- @SuppressWarnings("unchecked")
- private static Map<String, List<Object>> createDocument(Object... fields) {
- assert fields.length % 2 == 0;
- final Map<String, List<Object>> map = new HashMap<>();
- for (int i = 0; i < fields.length; i += 2) {
- String field = (String) fields[i];
- if (fields[i + 1] instanceof List) {
- map.put(field, (List<Object>) fields[i + 1]);
- } else {
- map.put(field, Collections.singletonList(fields[i + 1]));
- }
- }
- return map;
- }
-
- private static long asLong(String dateTime) {
- return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
- }
-
- private static Sort buildIndexSort(List<CompositeValuesSourceBuilder<?>> sources, Map<String, MappedFieldType> fieldTypes) {
- List<SortField> sortFields = new ArrayList<>();
- Map<String, MappedFieldType> remainingFieldTypes = new HashMap<>(fieldTypes);
- for (CompositeValuesSourceBuilder<?> source : sources) {
- MappedFieldType type = fieldTypes.remove(source.field());
- remainingFieldTypes.remove(source.field());
- SortField sortField = sortFieldFrom(type);
- if (sortField == null) {
- break;
- }
- sortFields.add(sortField);
- }
- while (remainingFieldTypes.size() > 0 && randomBoolean()) {
- // Add extra unused sorts
- List<String> fields = new ArrayList<>(remainingFieldTypes.keySet());
- Collections.sort(fields);
- String field = fields.get(between(0, fields.size() - 1));
- SortField sortField = sortFieldFrom(remainingFieldTypes.remove(field));
- if (sortField != null) {
- sortFields.add(sortField);
- }
- }
- return sortFields.size() > 0 ? new Sort(sortFields.toArray(new SortField[0])) : null;
- }
-
- private static SortField sortFieldFrom(MappedFieldType type) {
- if (type instanceof KeywordFieldMapper.KeywordFieldType) {
- return new SortedSetSortField(type.name(), false);
- } else if (type instanceof DateFieldMapper.DateFieldType) {
- return new SortedNumericSortField(type.name(), SortField.Type.LONG, false);
- } else if (type instanceof NumberFieldMapper.NumberFieldType) {
- switch (type.typeName()) {
- case "byte":
- case "short":
- case "integer":
- return new SortedNumericSortField(type.name(), SortField.Type.INT, false);
- case "long":
- return new SortedNumericSortField(type.name(), SortField.Type.LONG, false);
- case "float":
- case "double":
- return new SortedNumericSortField(type.name(), SortField.Type.DOUBLE, false);
- default:
- return null;
- }
- }
- return null;
- }
}
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java
index dfe4034650594..1443208a1d2fc 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java
@@ -34,14 +34,15 @@
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.geometry.Rectangle;
+import org.opensearch.search.aggregations.bucket.GeoTileUtils;
import org.opensearch.test.OpenSearchTestCase;
-import static org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils.MAX_ZOOM;
-import static org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils.checkPrecisionRange;
-import static org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils.hashToGeoPoint;
-import static org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils.keyToGeoPoint;
-import static org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils.longEncode;
-import static org.opensearch.search.aggregations.bucket.geogrid.GeoTileUtils.stringEncode;
+import static org.opensearch.search.aggregations.bucket.GeoTileUtils.MAX_ZOOM;
+import static org.opensearch.search.aggregations.bucket.GeoTileUtils.checkPrecisionRange;
+import static org.opensearch.search.aggregations.bucket.GeoTileUtils.hashToGeoPoint;
+import static org.opensearch.search.aggregations.bucket.GeoTileUtils.keyToGeoPoint;
+import static org.opensearch.search.aggregations.bucket.GeoTileUtils.longEncode;
+import static org.opensearch.search.aggregations.bucket.GeoTileUtils.stringEncode;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoBoundsTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoBoundsTests.java
deleted file mode 100644
index e132426680fc8..0000000000000
--- a/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoBoundsTests.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.search.aggregations.metrics;
-
-import org.opensearch.search.aggregations.BaseAggregationTestCase;
-
-public class GeoBoundsTests extends BaseAggregationTestCase {
-
- @Override
- protected GeoBoundsAggregationBuilder createTestAggregatorBuilder() {
- GeoBoundsAggregationBuilder factory = new GeoBoundsAggregationBuilder(randomAlphaOfLengthBetween(1, 20));
- String field = randomAlphaOfLengthBetween(3, 20);
- factory.field(field);
- if (randomBoolean()) {
- factory.wrapLongitude(randomBoolean());
- }
- if (randomBoolean()) {
- factory.missing("0,0");
- }
- return factory;
- }
-
-}
diff --git a/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java b/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java
index 36a1f91a02887..5df8990ceddf7 100644
--- a/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java
+++ b/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java
@@ -44,7 +44,7 @@ public void testParseDeletePitRequestWithInvalidJsonThrowsException() throws Exc
public void testDeletePitWithBody() throws Exception {
SetOnce<Boolean> pitCalled = new SetOnce<>();
try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) {
- @Override
+ // @Override
public void deletePits(DeletePitRequest request, ActionListener<DeletePitResponse> listener) {
pitCalled.set(true);
assertThat(request.getPitIds(), hasSize(1));
@@ -66,7 +66,7 @@ public void deletePits(DeletePitRequest request, ActionListener
SetOnce<Boolean> pitCalled = new SetOnce<>();
try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) {
- @Override
+ // @Override
public void deletePits(DeletePitRequest request, ActionListener<DeletePitResponse> listener) {
pitCalled.set(true);
assertThat(request.getPitIds(), hasSize(1));
@@ -85,7 +85,7 @@ public void deletePits(DeletePitRequest request, ActionListener
SetOnce<Boolean> pitCalled = new SetOnce<>();
try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) {
- @Override
+ // @Override
public void deletePits(DeletePitRequest request, ActionListener<DeletePitResponse> listener) {
pitCalled.set(true);
assertThat(request.getPitIds(), hasSize(1));
@@ -110,7 +110,7 @@ public void deletePits(DeletePitRequest request, ActionListener
SetOnce<Boolean> pitCalled = new SetOnce<>();
try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) {
- @Override
+ // @Override
public void deletePits(DeletePitRequest request, ActionListener<DeletePitResponse> listener) {
pitCalled.set(true);
assertThat(request.getPitIds(), hasSize(2));
diff --git a/settings.gradle b/settings.gradle
index 65dc6a13100e2..4c389b5490e7c 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -10,7 +10,7 @@
*/
plugins {
- id "com.gradle.enterprise" version "3.10.3"
+ id "com.gradle.enterprise" version "3.11.1"
}
buildCache {
diff --git a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java
index 249ffcfd0bf6e..b3f062aef4fbe 100644
--- a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java
@@ -141,7 +141,12 @@ protected ReplicationGroup createGroup(int replicas, Settings settings) throws I
}
protected ReplicationGroup createGroup(int replicas, Settings settings, EngineFactory engineFactory) throws IOException {
- IndexMetadata metadata = buildIndexMetadata(replicas, settings, indexMapping);
+ return createGroup(replicas, settings, indexMapping, engineFactory);
+ }
+
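+ /**
+ * Creates a replication group whose index uses the supplied mappings, letting tests provide a custom mapping.
+ */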
+ protected ReplicationGroup createGroup(int replicas, Settings settings, String mappings, EngineFactory engineFactory)
+ throws IOException {
+ IndexMetadata metadata = buildIndexMetadata(replicas, settings, mappings);
return new ReplicationGroup(metadata) {
@Override
protected EngineFactory getEngineFactory(ShardRouting routing) {
diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java
index 7dedc572ff19b..f446538acccbb 100644
--- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java
@@ -90,6 +90,7 @@
import org.opensearch.index.snapshots.IndexShardSnapshotStatus;
import org.opensearch.index.store.Store;
import org.opensearch.index.store.StoreFileMetadata;
+import org.opensearch.index.translog.InternalTranslogFactory;
import org.opensearch.index.translog.Translog;
import org.opensearch.indices.breaker.CircuitBreakerService;
import org.opensearch.indices.breaker.HierarchyCircuitBreakerService;
@@ -555,6 +556,7 @@ protected IndexShard newShard(
globalCheckpointSyncer,
retentionLeaseSyncer,
breakerService,
+ new InternalTranslogFactory(),
checkpointPublisher,
remoteStore
);
diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java
new file mode 100644
index 0000000000000..7d00772913d6e
--- /dev/null
+++ b/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java
@@ -0,0 +1,310 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.aggregations.composite;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.InetAddressPoint;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.SortedNumericSortField;
+import org.apache.lucene.search.SortedSetSortField;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.analysis.MockAnalyzer;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.NumericUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.text.Text;
+import org.opensearch.common.time.DateFormatter;
+import org.opensearch.common.time.DateFormatters;
+import org.opensearch.index.Index;
+import org.opensearch.index.IndexSettings;
+import org.opensearch.index.mapper.DateFieldMapper;
+import org.opensearch.index.mapper.DocumentMapper;
+import org.opensearch.index.mapper.IpFieldMapper;
+import org.opensearch.index.mapper.KeywordFieldMapper;
+import org.opensearch.index.mapper.MappedFieldType;
+import org.opensearch.index.mapper.MapperService;
+import org.opensearch.index.mapper.NumberFieldMapper;
+import org.opensearch.search.aggregations.AggregatorTestCase;
+import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
+import org.opensearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
+import org.opensearch.search.aggregations.bucket.composite.InternalComposite;
+import org.opensearch.test.IndexSettingsModule;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+/**
+ * Base class for aggregator tests that exercise the composite aggregation framework.
+ */
+public class BaseCompositeAggregatorTestCase extends AggregatorTestCase {
+
+ protected static List<MappedFieldType> FIELD_TYPES;
+
+ @Override
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+ FIELD_TYPES = new ArrayList<>();
+ FIELD_TYPES.add(new KeywordFieldMapper.KeywordFieldType("keyword"));
+ FIELD_TYPES.add(new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG));
+ FIELD_TYPES.add(new NumberFieldMapper.NumberFieldType("double", NumberFieldMapper.NumberType.DOUBLE));
+ FIELD_TYPES.add(new DateFieldMapper.DateFieldType("date", DateFormatter.forPattern("yyyy-MM-dd||epoch_millis")));
+ FIELD_TYPES.add(new NumberFieldMapper.NumberFieldType("price", NumberFieldMapper.NumberType.INTEGER));
+ FIELD_TYPES.add(new KeywordFieldMapper.KeywordFieldType("terms"));
+ FIELD_TYPES.add(new IpFieldMapper.IpFieldType("ip"));
+ }
+
+ @Override
+ @After
+ public void tearDown() throws Exception {
+ super.tearDown();
+ FIELD_TYPES = null;
+ }
+
+ @Override
+ protected MapperService mapperServiceMock() {
+ MapperService mapperService = mock(MapperService.class);
+ DocumentMapper mapper = mock(DocumentMapper.class);
+ when(mapper.typeText()).thenReturn(new Text("_doc"));
+ when(mapper.type()).thenReturn("_doc");
+ when(mapperService.documentMapper()).thenReturn(mapper);
+ return mapperService;
+ }
+
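+ /**
+ * Builds a document spec from alternating field name / value pairs; list values are stored as-is.
+ */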
+ @SuppressWarnings("unchecked")
+ protected static Map<String, List<Object>> createDocument(Object... fields) {
+ assert fields.length % 2 == 0;
+ final Map<String, List<Object>> map = new HashMap<>();
+ for (int i = 0; i < fields.length; i += 2) {
+ String field = (String) fields[i];
+ if (fields[i + 1] instanceof List) {
+ map.put(field, (List<Object>) fields[i + 1]);
+ } else {
+ map.put(field, Collections.singletonList(fields[i + 1]));
+ }
+ }
+ return map;
+ }
+
+ protected void testSearchCase(
+ List<Query> queries,
+ List<Map<String, List<Object>>> dataset,
+ Supplier<CompositeAggregationBuilder> create,
+ Consumer<InternalComposite> verify
+ ) throws IOException {
+ for (Query query : queries) {
+ executeTestCase(false, false, query, dataset, create, verify);
+ executeTestCase(false, true, query, dataset, create, verify);
+ }
+ }
+
+ protected void executeTestCase(
+ boolean forceMerge,
+ boolean useIndexSort,
+ Query query,
+ List<Map<String, List<Object>>> dataset,
+ Supplier<CompositeAggregationBuilder> create,
+ Consumer<InternalComposite> verify
+ ) throws IOException {
+ Map<String, MappedFieldType> types = FIELD_TYPES.stream().collect(Collectors.toMap(MappedFieldType::name, Function.identity()));
+ CompositeAggregationBuilder aggregationBuilder = create.get();
+ Sort indexSort = useIndexSort ? buildIndexSort(aggregationBuilder.sources(), types) : null;
+ IndexSettings indexSettings = createIndexSettings(indexSort);
+ try (Directory directory = newDirectory()) {
+ IndexWriterConfig config = newIndexWriterConfig(random(), new MockAnalyzer(random()));
+ if (indexSort != null) {
+ config.setIndexSort(indexSort);
+ config.setCodec(TestUtil.getDefaultCodec());
+ }
+ try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, config)) {
+ Document document = new Document();
+ int id = 0;
+ for (Map<String, List<Object>> fields : dataset) {
+ document.clear();
+ addToDocument(id, document, fields);
+ indexWriter.addDocument(document);
+ id++;
+ }
+ if (forceMerge || rarely()) {
+ // forceMerge randomly or if the collector-per-leaf testing stuff would break the tests.
+ indexWriter.forceMerge(1);
+ } else {
+ if (dataset.size() > 0) {
+ int numDeletes = randomIntBetween(1, 25);
+ for (int i = 0; i < numDeletes; i++) {
+ id = randomIntBetween(0, dataset.size() - 1);
+ indexWriter.deleteDocuments(new Term("id", Integer.toString(id)));
+ document.clear();
+ addToDocument(id, document, dataset.get(id));
+ indexWriter.addDocument(document);
+ }
+ }
+
+ }
+ }
+ try (IndexReader indexReader = DirectoryReader.open(directory)) {
+ IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+ InternalComposite composite = searchAndReduce(
+ indexSettings,
+ indexSearcher,
+ query,
+ aggregationBuilder,
+ FIELD_TYPES.toArray(new MappedFieldType[0])
+ );
+ verify.accept(composite);
+ }
+ }
+ }
+
+ protected void addToDocument(int id, Document doc, Map<String, List<Object>> keys) {
+ doc.add(new StringField("id", Integer.toString(id), Field.Store.NO));
+ for (Map.Entry<String, List<Object>> entry : keys.entrySet()) {
+ final String name = entry.getKey();
+ for (Object value : entry.getValue()) {
+ if (value instanceof Integer) {
+ doc.add(new SortedNumericDocValuesField(name, (int) value));
+ doc.add(new IntPoint(name, (int) value));
+ } else if (value instanceof Long) {
+ doc.add(new SortedNumericDocValuesField(name, (long) value));
+ doc.add(new LongPoint(name, (long) value));
+ } else if (value instanceof Double) {
+ doc.add(new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong((double) value)));
+ doc.add(new DoublePoint(name, (double) value));
+ } else if (value instanceof String) {
+ doc.add(new SortedSetDocValuesField(name, new BytesRef((String) value)));
+ doc.add(new StringField(name, new BytesRef((String) value), Field.Store.NO));
+ } else if (value instanceof InetAddress) {
+ doc.add(new SortedSetDocValuesField(name, new BytesRef(InetAddressPoint.encode((InetAddress) value))));
+ doc.add(new InetAddressPoint(name, (InetAddress) value));
+ } else {
+ if (addValueToDocument(doc, name, value) == false) {
+ throw new AssertionError("invalid object: " + value.getClass().getSimpleName());
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Override this method to index any additional value types needed by a composite aggregation test.
+ * If you add another composite value source type, override this so that its field values are added to
+ * the document correctly.
+ *
+ * @param doc the {@link Document} to add the field to
+ * @param name the field name
+ * @param value the field value
+ * @return true if the value was handled, false otherwise
+ */
+ protected boolean addValueToDocument(final Document doc, final String name, final Object value) {
+ return false;
+ }
+
+ protected static Sort buildIndexSort(List<CompositeValuesSourceBuilder<?>> sources, Map<String, MappedFieldType> fieldTypes) {
+ List<SortField> sortFields = new ArrayList<>();
+ Map<String, MappedFieldType> remainingFieldTypes = new HashMap<>(fieldTypes);
+ for (CompositeValuesSourceBuilder<?> source : sources) {
+ MappedFieldType type = fieldTypes.remove(source.field());
+ remainingFieldTypes.remove(source.field());
+ SortField sortField = sortFieldFrom(type);
+ if (sortField == null) {
+ break;
+ }
+ sortFields.add(sortField);
+ }
+ while (remainingFieldTypes.size() > 0 && randomBoolean()) {
+ // Add extra unused sorts
+ List<String> fields = new ArrayList<>(remainingFieldTypes.keySet());
+ Collections.sort(fields);
+ String field = fields.get(between(0, fields.size() - 1));
+ SortField sortField = sortFieldFrom(remainingFieldTypes.remove(field));
+ if (sortField != null) {
+ sortFields.add(sortField);
+ }
+ }
+ return sortFields.size() > 0 ? new Sort(sortFields.toArray(new SortField[0])) : null;
+ }
+
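+ /**
+ * Maps a {@link MappedFieldType} to its index-sort {@link SortField}, or {@code null} if the type
+ * does not support index sorting.
+ */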
+ protected static SortField sortFieldFrom(MappedFieldType type) {
+ if (type instanceof KeywordFieldMapper.KeywordFieldType) {
+ return new SortedSetSortField(type.name(), false);
+ } else if (type instanceof DateFieldMapper.DateFieldType) {
+ return new SortedNumericSortField(type.name(), SortField.Type.LONG, false);
+ } else if (type instanceof NumberFieldMapper.NumberFieldType) {
+ switch (type.typeName()) {
+ case "byte":
+ case "short":
+ case "integer":
+ return new SortedNumericSortField(type.name(), SortField.Type.INT, false);
+ case "long":
+ return new SortedNumericSortField(type.name(), SortField.Type.LONG, false);
+ case "float":
+ case "double":
+ return new SortedNumericSortField(type.name(), SortField.Type.DOUBLE, false);
+ default:
+ return null;
+ }
+ }
+ return null;
+ }
+
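+ /**
+ * Builds {@link IndexSettings} whose {@code index.sort.field} and {@code index.sort.order} settings
+ * mirror the given {@link Sort}; for example, a descending sort on {@code keyword} becomes
+ * {@code index.sort.field: [keyword]} and {@code index.sort.order: [desc]}.
+ */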
+ protected static IndexSettings createIndexSettings(Sort sort) {
+ Settings.Builder builder = Settings.builder();
+ if (sort != null) {
+ String[] fields = Arrays.stream(sort.getSort()).map(SortField::getField).toArray(String[]::new);
+ String[] orders = Arrays.stream(sort.getSort()).map((o) -> o.getReverse() ? "desc" : "asc").toArray(String[]::new);
+ builder.putList("index.sort.field", fields);
+ builder.putList("index.sort.order", orders);
+ }
+ return IndexSettingsModule.newIndexSettings(new Index("_index", "0"), builder.build());
+ }
+
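+ /**
+ * Builds an after-key map from alternating field name / value pairs; for example,
+ * {@code createAfterKey("keyword", "a", "long", 100L)} yields {@code {keyword=a, long=100}}.
+ */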
+ protected static Map<String, Object> createAfterKey(Object... fields) {
+ assert fields.length % 2 == 0;
+ final Map<String, Object> map = new HashMap<>();
+ for (int i = 0; i < fields.length; i += 2) {
+ String field = (String) fields[i];
+ map.put(field, fields[i + 1]);
+ }
+ return map;
+ }
+
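+ /**
+ * Parses the given date-time string with the default date formatter and returns its epoch millis,
+ * e.g. {@code asLong("2017-10-20T03:08:45")}.
+ */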
+ protected static long asLong(String dateTime) {
+ return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
+ }
+}
diff --git a/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java
index f138de152a488..5325c48e16913 100644
--- a/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java
@@ -68,10 +68,6 @@
import org.opensearch.search.aggregations.bucket.filter.FiltersAggregationBuilder;
import org.opensearch.search.aggregations.bucket.filter.ParsedFilter;
import org.opensearch.search.aggregations.bucket.filter.ParsedFilters;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder;
-import org.opensearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
-import org.opensearch.search.aggregations.bucket.geogrid.ParsedGeoHashGrid;
-import org.opensearch.search.aggregations.bucket.geogrid.ParsedGeoTileGrid;
import org.opensearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.opensearch.search.aggregations.bucket.global.ParsedGlobal;
import org.opensearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder;
@@ -117,7 +113,6 @@
import org.opensearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.opensearch.search.aggregations.metrics.CardinalityAggregationBuilder;
import org.opensearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder;
-import org.opensearch.search.aggregations.metrics.GeoBoundsAggregationBuilder;
import org.opensearch.search.aggregations.metrics.GeoCentroidAggregationBuilder;
import org.opensearch.search.aggregations.metrics.InternalHDRPercentileRanks;
import org.opensearch.search.aggregations.metrics.InternalHDRPercentiles;
@@ -129,7 +124,6 @@
import org.opensearch.search.aggregations.metrics.ParsedAvg;
import org.opensearch.search.aggregations.metrics.ParsedCardinality;
import org.opensearch.search.aggregations.metrics.ParsedExtendedStats;
-import org.opensearch.search.aggregations.metrics.ParsedGeoBounds;
import org.opensearch.search.aggregations.metrics.ParsedGeoCentroid;
import org.opensearch.search.aggregations.metrics.ParsedHDRPercentileRanks;
import org.opensearch.search.aggregations.metrics.ParsedHDRPercentiles;
@@ -261,7 +255,6 @@ public ReduceContext forFinalReduction() {
map.put(StatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedStatsBucket.fromXContent(p, (String) c));
map.put(ExtendedStatsAggregationBuilder.NAME, (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c));
map.put(ExtendedStatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedExtendedStatsBucket.fromXContent(p, (String) c));
- map.put(GeoBoundsAggregationBuilder.NAME, (p, c) -> ParsedGeoBounds.fromXContent(p, (String) c));
map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c));
map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c));
map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c));
@@ -278,8 +271,6 @@ public ReduceContext forFinalReduction() {
map.put(GlobalAggregationBuilder.NAME, (p, c) -> ParsedGlobal.fromXContent(p, (String) c));
map.put(FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c));
map.put(InternalSampler.PARSER_NAME, (p, c) -> ParsedSampler.fromXContent(p, (String) c));
- map.put(GeoHashGridAggregationBuilder.NAME, (p, c) -> ParsedGeoHashGrid.fromXContent(p, (String) c));
- map.put(GeoTileGridAggregationBuilder.NAME, (p, c) -> ParsedGeoTileGrid.fromXContent(p, (String) c));
map.put(RangeAggregationBuilder.NAME, (p, c) -> ParsedRange.fromXContent(p, (String) c));
map.put(DateRangeAggregationBuilder.NAME, (p, c) -> ParsedDateRange.fromXContent(p, (String) c));
map.put(GeoDistanceAggregationBuilder.NAME, (p, c) -> ParsedGeoDistance.fromXContent(p, (String) c));
diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
index 8d8df2fec39f9..1ab7785b17f5e 100644
--- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
@@ -2097,6 +2097,7 @@ protected Collection<Class<? extends Plugin>> getMockPlugins() {
if (addMockGeoShapeFieldMapper()) {
mocks.add(TestGeoShapeFieldMapperPlugin.class);
}
+
return Collections.unmodifiableList(mocks);
}
diff --git a/test/framework/src/main/java/org/opensearch/test/gateway/TestGatewayAllocator.java b/test/framework/src/main/java/org/opensearch/test/gateway/TestGatewayAllocator.java
index 54c92f4d519aa..a36dc26685eb4 100644
--- a/test/framework/src/main/java/org/opensearch/test/gateway/TestGatewayAllocator.java
+++ b/test/framework/src/main/java/org/opensearch/test/gateway/TestGatewayAllocator.java
@@ -43,6 +43,7 @@
import org.opensearch.gateway.ReplicaShardAllocator;
import org.opensearch.gateway.TransportNodesListGatewayStartedShards.NodeGatewayStartedShards;
import org.opensearch.index.shard.ShardId;
+import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint;
import org.opensearch.indices.store.TransportNodesListShardStoreMetadata.NodeStoreFilesMetadata;
import java.util.Collections;
@@ -71,6 +72,7 @@ public class TestGatewayAllocator extends GatewayAllocator {
Map<String, Map<ShardId, ShardRouting>> knownAllocations = new HashMap<>();
DiscoveryNodes currentNodes = DiscoveryNodes.EMPTY_NODES;
+ Map<String, ReplicationCheckpoint> shardIdNodeToReplicationCheckPointMap = new HashMap<>();
PrimaryShardAllocator primaryShardAllocator = new PrimaryShardAllocator() {
@Override
@@ -90,7 +92,8 @@ protected AsyncShardFetch.FetchResult<NodeGatewayStartedShards> fetchData(ShardR
routing -> new NodeGatewayStartedShards(
currentNodes.get(routing.currentNodeId()),
routing.allocationId().getId(),
- routing.primary()
+ routing.primary(),
+ getReplicationCheckpoint(shardId, routing.currentNodeId())
)
)
);
@@ -99,6 +102,10 @@ protected AsyncShardFetch.FetchResult<NodeGatewayStartedShards> fetchData(ShardR
}
};
+ private ReplicationCheckpoint getReplicationCheckpoint(ShardId shardId, String nodeName) {
+ return shardIdNodeToReplicationCheckPointMap.getOrDefault(getReplicationCheckPointKey(shardId, nodeName), null);
+ }
+
ReplicaShardAllocator replicaShardAllocator = new ReplicaShardAllocator() {
@Override
protected AsyncShardFetch.FetchResult<NodeStoreFilesMetadata> fetchData(ShardRouting shard, RoutingAllocation allocation) {
@@ -156,4 +163,12 @@ public void allocateUnassigned(
public void addKnownAllocation(ShardRouting shard) {
knownAllocations.computeIfAbsent(shard.currentNodeId(), id -> new HashMap<>()).put(shard.shardId(), shard);
}
+
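+ /**
+ * Builds the key under which a checkpoint is stored in {@code shardIdNodeToReplicationCheckPointMap};
+ * for example, shard 0 of index "test" on node "node_1" yields {@code "[test][0]_node_1"}.
+ */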
+ public String getReplicationCheckPointKey(ShardId shardId, String nodeName) {
+ return shardId.toString() + "_" + nodeName;
+ }
+
+ public void addReplicationCheckpoint(ShardId shardId, String nodeName, ReplicationCheckpoint replicationCheckpoint) {
+ shardIdNodeToReplicationCheckPointMap.putIfAbsent(getReplicationCheckPointKey(shardId, nodeName), replicationCheckpoint);
+ }
}