From b977f1edb7f5d5b208e16c9c69a245ff93e125bd Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Fri, 18 Aug 2023 14:05:48 -0700 Subject: [PATCH 1/6] [Spotless] Applying Google Code Format for opensearch directory (pt 2/2) #17 (#1978) * spotless apply for OpenSearch P2. Signed-off-by: Mitchell Gale * Spotlesss apply run Signed-off-by: Mitchell Gale * Addressed PR comments Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * spotless apply Signed-off-by: Mitchell Gale * fixed json formatting in test. Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Guian Gumpac --- build.gradle | 4 +- .../client/OpenSearchNodeClient.java | 59 +- .../client/OpenSearchRestClient.java | 68 +- .../data/type/OpenSearchGeoPointType.java | 5 +- .../data/type/OpenSearchIpType.java | 5 +- .../data/type/OpenSearchTextType.java | 10 +- .../sql/opensearch/data/utils/Content.java | 6 +- .../data/utils/OpenSearchJsonContent.java | 26 +- .../data/value/OpenSearchExprIpValue.java | 3 +- .../data/value/OpenSearchExprTextValue.java | 5 +- .../value/OpenSearchExprValueFactory.java | 187 +++--- .../executor/OpenSearchQueryManager.java | 4 +- .../protector/ResourceMonitorPlan.java | 28 +- .../monitor/OpenSearchMemoryHealthy.java | 21 +- .../monitor/OpenSearchResourceMonitor.java | 18 +- .../request/OpenSearchQueryRequest.java | 74 +-- .../opensearch/request/OpenSearchRequest.java | 25 +- .../request/OpenSearchRequestBuilder.java | 141 ++-- .../request/OpenSearchScrollRequest.java | 54 +- .../system/OpenSearchSystemRequest.java | 5 +- .../response/OpenSearchResponse.java | 174 +++-- .../response/agg/SingleValueParser.java | 7 +- .../opensearch/response/agg/StatsParser.java | 4 +- .../response/agg/TopHitsParser.java | 11 +- .../sql/opensearch/response/agg/Utils.java | 1 + .../opensearch/security/SecurityAccess.java | 5 +- .../setting/OpenSearchSettings.java | 305 
+++++---- .../opensearch/storage/OpenSearchIndex.java | 97 ++- .../storage/OpenSearchStorageEngine.java | 8 +- .../storage/scan/OpenSearchIndexScan.java | 34 +- ...OpenSearchIndexScanAggregationBuilder.java | 12 +- .../scan/OpenSearchIndexScanBuilder.java | 30 +- .../scan/OpenSearchIndexScanQueryBuilder.java | 45 +- .../storage/scan/PushDownQueryBuilder.java | 4 +- .../storage/script/StringUtils.java | 2 +- .../script/filter/lucene/LuceneQuery.java | 6 +- .../script/filter/lucene/RangeQuery.java | 16 +- .../script/filter/lucene/TermQuery.java | 5 +- .../filter/lucene/relevance/QueryQuery.java | 8 +- .../lucene/relevance/QueryStringQuery.java | 12 +- .../lucene/relevance/RelevanceQuery.java | 55 +- .../relevance/SimpleQueryStringQuery.java | 8 +- .../lucene/relevance/SingleFieldQuery.java | 20 +- .../lucene/relevance/WildcardQuery.java | 9 +- .../storage/script/sort/SortQueryBuilder.java | 37 +- .../storage/system/OpenSearchSystemIndex.java | 22 +- .../system/OpenSearchSystemIndexScan.java | 13 +- .../system/OpenSearchSystemIndexSchema.java | 88 ++- .../client/OpenSearchNodeClientTest.java | 175 +++-- .../client/OpenSearchRestClientTest.java | 203 +++--- .../OpenSearchExprGeoPointValueTest.java | 1 - .../data/value/OpenSearchExprIpValueTest.java | 1 - .../value/OpenSearchExprTextValueTest.java | 76 ++- .../value/OpenSearchExprValueFactoryTest.java | 616 +++++++++--------- .../executor/OpenSearchQueryManagerTest.java | 35 +- .../executor/ResourceMonitorPlanTest.java | 13 +- .../monitor/OpenSearchMemoryHealthyTest.java | 16 +- .../OpenSearchResourceMonitorTest.java | 20 +- .../request/OpenSearchQueryRequestTest.java | 102 ++- .../request/OpenSearchRequestBuilderTest.java | 247 +++---- .../request/OpenSearchScrollRequestTest.java | 148 ++--- ...enSearchAggregationResponseParserTest.java | 14 +- .../response/OpenSearchResponseTest.java | 121 ++-- .../setting/OpenSearchSettingsTest.java | 38 +- .../storage/OpenSearchIndexTest.java | 133 ++-- 
.../storage/OpenSearchStorageEngineTest.java | 25 +- ...SearchIndexScanAggregationBuilderTest.java | 7 +- .../OpenSearchIndexScanOptimizationTest.java | 586 ++++++----------- .../OpenSearchIndexScanPaginationTest.java | 46 +- .../storage/scan/OpenSearchIndexScanTest.java | 193 +++--- .../scan/PushDownQueryBuilderTest.java | 19 +- .../script/filter/lucene/QueryStringTest.java | 102 +-- .../script/filter/lucene/QueryTest.java | 107 +-- .../script/filter/lucene/RangeQueryTest.java | 10 +- .../filter/lucene/SimpleQueryStringTest.java | 112 ++-- .../filter/lucene/WildcardQueryTest.java | 43 +- .../relevance/RelevanceQueryBuildTest.java | 31 +- .../relevance/SingleFieldQueryTest.java | 47 +- .../script/sort/SortQueryBuilderTest.java | 55 +- .../system/OpenSearchSystemIndexScanTest.java | 4 +- .../system/OpenSearchSystemIndexTest.java | 29 +- .../sql/opensearch/utils/Utils.java | 11 +- 82 files changed, 2396 insertions(+), 2776 deletions(-) diff --git a/build.gradle b/build.gradle index 2bdc4865bb..2ab7abc42a 100644 --- a/build.gradle +++ b/build.gradle @@ -93,7 +93,9 @@ spotless { 'spark/**/*.java', 'plugin/**/*.java', 'ppl/**/*.java', - 'integ-test/**/*java' + 'integ-test/**/*java', + 'core/**/*.java', + 'opensearch/**/*.java' exclude '**/build/**', '**/build-*/**' } importOrder() diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java index c6d44e2c23..993e092534 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import com.google.common.collect.ImmutableList; @@ -40,9 +39,7 @@ public class OpenSearchNodeClient implements OpenSearchClient { /** Node client provided by OpenSearch container. 
*/ private final NodeClient client; - /** - * Constructor of OpenSearchNodeClient. - */ + /** Constructor of OpenSearchNodeClient. */ public OpenSearchNodeClient(NodeClient client) { this.client = client; } @@ -50,8 +47,8 @@ public OpenSearchNodeClient(NodeClient client) { @Override public boolean exists(String indexName) { try { - IndicesExistsResponse checkExistResponse = client.admin().indices() - .exists(new IndicesExistsRequest(indexName)).actionGet(); + IndicesExistsResponse checkExistResponse = + client.admin().indices().exists(new IndicesExistsRequest(indexName)).actionGet(); return checkExistResponse.isExists(); } catch (Exception e) { throw new IllegalStateException("Failed to check if index [" + indexName + "] exists", e); @@ -83,13 +80,12 @@ public void createIndex(String indexName, Map mappings) { @Override public Map getIndexMappings(String... indexExpression) { try { - GetMappingsResponse mappingsResponse = client.admin().indices() - .prepareGetMappings(indexExpression) - .setLocal(true) - .get(); - return mappingsResponse.mappings().entrySet().stream().collect(Collectors.toUnmodifiableMap( - Map.Entry::getKey, - cursor -> new IndexMapping(cursor.getValue()))); + GetMappingsResponse mappingsResponse = + client.admin().indices().prepareGetMappings(indexExpression).setLocal(true).get(); + return mappingsResponse.mappings().entrySet().stream() + .collect( + Collectors.toUnmodifiableMap( + Map.Entry::getKey, cursor -> new IndexMapping(cursor.getValue()))); } catch (IndexNotFoundException e) { // Re-throw directly to be treated as client error finally throw e; @@ -127,15 +123,11 @@ public Map getIndexMaxResultWindows(String... indexExpression) } } - /** - * TODO: Scroll doesn't work for aggregation. Support aggregation later. - */ + /** TODO: Scroll doesn't work for aggregation. Support aggregation later. 
*/ @Override public OpenSearchResponse search(OpenSearchRequest request) { return request.search( - req -> client.search(req).actionGet(), - req -> client.searchScroll(req).actionGet() - ); + req -> client.search(req).actionGet(), req -> client.searchScroll(req).actionGet()); } /** @@ -145,13 +137,12 @@ public OpenSearchResponse search(OpenSearchRequest request) { */ @Override public List indices() { - final GetIndexResponse indexResponse = client.admin().indices() - .prepareGetIndex() - .setLocal(true) - .get(); + final GetIndexResponse indexResponse = + client.admin().indices().prepareGetIndex().setLocal(true).get(); final Stream aliasStream = ImmutableList.copyOf(indexResponse.aliases().values()).stream() - .flatMap(Collection::stream).map(AliasMetadata::alias); + .flatMap(Collection::stream) + .map(AliasMetadata::alias); return Stream.concat(Arrays.stream(indexResponse.getIndices()), aliasStream) .collect(Collectors.toList()); @@ -164,20 +155,20 @@ public List indices() { */ @Override public Map meta() { - return ImmutableMap.of(META_CLUSTER_NAME, - client.settings().get("cluster.name", "opensearch")); + return ImmutableMap.of(META_CLUSTER_NAME, client.settings().get("cluster.name", "opensearch")); } @Override public void cleanup(OpenSearchRequest request) { - request.clean(scrollId -> { - try { - client.prepareClearScroll().addScrollId(scrollId).get(); - } catch (Exception e) { - throw new IllegalStateException( - "Failed to clean up resources for search request " + request, e); - } - }); + request.clean( + scrollId -> { + try { + client.prepareClearScroll().addScrollId(scrollId).get(); + } catch (Exception e) { + throw new IllegalStateException( + "Failed to clean up resources for search request " + request, e); + } + }); } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java index c27c4bbc30..b6106982a7 100644 
--- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import com.google.common.collect.ImmutableList; @@ -49,8 +48,7 @@ public class OpenSearchRestClient implements OpenSearchClient { @Override public boolean exists(String indexName) { try { - return client.indices().exists( - new GetIndexRequest(indexName), RequestOptions.DEFAULT); + return client.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT); } catch (IOException e) { throw new IllegalStateException("Failed to check if index [" + indexName + "] exist", e); } @@ -59,8 +57,9 @@ public boolean exists(String indexName) { @Override public void createIndex(String indexName, Map mappings) { try { - client.indices().create( - new CreateIndexRequest(indexName).mapping(mappings), RequestOptions.DEFAULT); + client + .indices() + .create(new CreateIndexRequest(indexName).mapping(mappings), RequestOptions.DEFAULT); } catch (IOException e) { throw new IllegalStateException("Failed to create index [" + indexName + "]", e); } @@ -80,27 +79,29 @@ public Map getIndexMappings(String... indexExpression) { @Override public Map getIndexMaxResultWindows(String... 
indexExpression) { - GetSettingsRequest request = new GetSettingsRequest() - .indices(indexExpression).includeDefaults(true); + GetSettingsRequest request = + new GetSettingsRequest().indices(indexExpression).includeDefaults(true); try { GetSettingsResponse response = client.indices().getSettings(request, RequestOptions.DEFAULT); Map settings = response.getIndexToSettings(); Map defaultSettings = response.getIndexToDefaultSettings(); Map result = new HashMap<>(); - defaultSettings.forEach((key, value) -> { - Integer maxResultWindow = value.getAsInt("index.max_result_window", null); - if (maxResultWindow != null) { - result.put(key, maxResultWindow); - } - }); - - settings.forEach((key, value) -> { - Integer maxResultWindow = value.getAsInt("index.max_result_window", null); - if (maxResultWindow != null) { - result.put(key, maxResultWindow); - } - }); + defaultSettings.forEach( + (key, value) -> { + Integer maxResultWindow = value.getAsInt("index.max_result_window", null); + if (maxResultWindow != null) { + result.put(key, maxResultWindow); + } + }); + + settings.forEach( + (key, value) -> { + Integer maxResultWindow = value.getAsInt("index.max_result_window", null); + if (maxResultWindow != null) { + result.put(key, maxResultWindow); + } + }); return result; } catch (IOException e) { @@ -126,8 +127,7 @@ public OpenSearchResponse search(OpenSearchRequest request) { throw new IllegalStateException( "Failed to perform scroll operation with request " + req, e); } - } - ); + }); } /** @@ -142,7 +142,8 @@ public List indices() { client.indices().get(new GetIndexRequest(), RequestOptions.DEFAULT); final Stream aliasStream = ImmutableList.copyOf(indexResponse.getAliases().values()).stream() - .flatMap(Collection::stream).map(AliasMetadata::alias); + .flatMap(Collection::stream) + .map(AliasMetadata::alias); return Stream.concat(Arrays.stream(indexResponse.getIndices()), aliasStream) .collect(Collectors.toList()); } catch (IOException e) { @@ -173,16 +174,17 @@ public Map 
meta() { @Override public void cleanup(OpenSearchRequest request) { - request.clean(scrollId -> { - try { - ClearScrollRequest clearRequest = new ClearScrollRequest(); - clearRequest.addScrollId(scrollId); - client.clearScroll(clearRequest, RequestOptions.DEFAULT); - } catch (IOException e) { - throw new IllegalStateException( - "Failed to clean up resources for search request " + request, e); - } - }); + request.clean( + scrollId -> { + try { + ClearScrollRequest clearRequest = new ClearScrollRequest(); + clearRequest.addScrollId(scrollId); + client.clearScroll(clearRequest, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new IllegalStateException( + "Failed to clean up resources for search request " + request, e); + } + }); } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java index c2428a59a8..75137973c5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -11,8 +10,8 @@ import lombok.EqualsAndHashCode; /** - * The type of a geo_point value. See - * doc + * The type of a geo_point value. 
See doc */ @EqualsAndHashCode(callSuper = false) public class OpenSearchGeoPointType extends OpenSearchDataType { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java index fccafc6caf..22581ec28c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -11,8 +10,8 @@ import lombok.EqualsAndHashCode; /** - * The type of an ip value. See - * doc + * The type of an ip value. See doc */ @EqualsAndHashCode(callSuper = false) public class OpenSearchIpType extends OpenSearchDataType { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java index 67b7296834..e7e453ca3f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java @@ -15,8 +15,8 @@ import org.opensearch.sql.data.type.ExprType; /** - * The type of a text value. See - * doc + * The type of text value. See doc */ public class OpenSearchTextType extends OpenSearchDataType { @@ -24,8 +24,7 @@ public class OpenSearchTextType extends OpenSearchDataType { // text could have fields // a read-only collection - @EqualsAndHashCode.Exclude - Map fields = ImmutableMap.of(); + @EqualsAndHashCode.Exclude Map fields = ImmutableMap.of(); private OpenSearchTextType() { super(MappingType.Text); @@ -34,6 +33,7 @@ private OpenSearchTextType() { /** * Constructs a Text Type using the passed in fields argument. 
+ * * @param fields The fields to be used to construct the text type. * @return A new OpenSeachTextTypeObject */ @@ -67,7 +67,7 @@ protected OpenSearchDataType cloneEmpty() { } /** - * Text field doesn't have doc value (exception thrown even when you call "get") + * Text field doesn't have doc value (exception thrown even when you call "get")
* Limitation: assume inner field name is always "keyword". */ public static String convertTextToKeyword(String fieldName, ExprType fieldType) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java index 0c3d2aec45..0fbd2d4f98 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java @@ -10,12 +10,12 @@ import org.apache.commons.lang3.tuple.Pair; /** - * * Regardless the underling data format, the {@link Content} define the data in abstract manner. * which could be parsed by ElasticsearchExprValueFactory. There are two major use cases: + * *
    - *
  1. Represent the JSON data retrieve from OpenSearch search response.
  2. - *
  3. Represent the Object data extract from the OpenSearch aggregation response.
  4. + *
  5. Represent the JSON data retrieve from OpenSearch search response. + *
  6. Represent the Object data extract from the OpenSearch aggregation response. *
*/ public interface Content { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java index 61da7c3b74..bdb15428e1 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.utils; import com.fasterxml.jackson.databind.JsonNode; @@ -14,9 +13,7 @@ import lombok.RequiredArgsConstructor; import org.apache.commons.lang3.tuple.Pair; -/** - * The Implementation of Content to represent {@link JsonNode}. - */ +/** The Implementation of Content to represent {@link JsonNode}. */ @RequiredArgsConstructor public class OpenSearchJsonContent implements Content { @@ -68,8 +65,7 @@ public Iterator> map() { final JsonNode mapValue = value(); mapValue .fieldNames() - .forEachRemaining( - field -> map.put(field, new OpenSearchJsonContent(mapValue.get(field)))); + .forEachRemaining(field -> map.put(field, new OpenSearchJsonContent(mapValue.get(field)))); return map.entrySet().iterator(); } @@ -133,33 +129,27 @@ public Pair geoValue() { lat = extractDoubleValue(value.get("lat")); } catch (Exception exception) { throw new IllegalStateException( - "latitude must be number value, but got value: " + value.get( - "lat")); + "latitude must be number value, but got value: " + value.get("lat")); } try { lon = extractDoubleValue(value.get("lon")); } catch (Exception exception) { throw new IllegalStateException( - "longitude must be number value, but got value: " + value.get( - "lon")); + "longitude must be number value, but got value: " + value.get("lon")); } return Pair.of(lat, lon); } else { - throw new IllegalStateException("geo point must in format of {\"lat\": number, \"lon\": " - + "number}"); + throw 
new IllegalStateException( + "geo point must in format of {\"lat\": number, \"lon\": number}"); } } - /** - * Getter for value. If value is array the whole array is returned. - */ + /** Getter for value. If value is array the whole array is returned. */ private JsonNode value() { return value; } - /** - * Get doubleValue from JsonNode if possible. - */ + /** Get doubleValue from JsonNode if possible. */ private Double extractDoubleValue(JsonNode node) { if (node.isTextual()) { return Double.valueOf(node.textValue()); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java index a17deb7e45..30b3784bfc 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import java.util.Objects; @@ -14,7 +13,7 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchIpType; /** - * OpenSearch IP ExprValue. + * OpenSearch IP ExprValue
* Todo, add this to avoid the unknown value type exception, the implementation will be changed. */ @RequiredArgsConstructor diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java index d093588168..fb696d6b04 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Expression Text Value, it is a extension of the ExprValue by OpenSearch. - */ +/** Expression Text Value, it is a extension of the ExprValue by OpenSearch. */ public class OpenSearchExprTextValue extends ExprStringValue { public OpenSearchExprTextValue(String value) { super(value); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java index 4e3e1ec5c0..22c2ece4a7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; @@ -71,18 +70,15 @@ import org.opensearch.sql.opensearch.data.utils.OpenSearchJsonContent; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; -/** - * Construct ExprValue from OpenSearch response. 
- */ +/** Construct ExprValue from OpenSearch response. */ public class OpenSearchExprValueFactory { - /** - * The Mapping of Field and ExprType. - */ + /** The Mapping of Field and ExprType. */ private final Map typeMapping; /** - * Extend existing mapping by new data without overwrite. - * Called from aggregation only {@see AggregationQueryBuilder#buildTypeMapping}. + * Extend existing mapping by new data without overwrite. Called from aggregation only {@see + * AggregationQueryBuilder#buildTypeMapping}. + * * @param typeMapping A data type mapping produced by aggregation. */ public void extendTypeMapping(Map typeMapping) { @@ -95,9 +91,7 @@ public void extendTypeMapping(Map typeMapping) { } } - @Getter - @Setter - private OpenSearchAggregationResponseParser parser; + @Getter @Setter private OpenSearchAggregationResponseParser parser; private static final String TOP_PATH = ""; @@ -105,48 +99,62 @@ public void extendTypeMapping(Map typeMapping) { private static final Map> typeActionMap = new ImmutableMap.Builder>() - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), (c, dt) -> new ExprIntegerValue(c.intValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Long), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Long), (c, dt) -> new ExprLongValue(c.longValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Short), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Short), (c, dt) -> new ExprShortValue(c.shortValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Byte), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Byte), (c, dt) -> new ExprByteValue(c.byteValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Float), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Float), (c, dt) -> new ExprFloatValue(c.floatValue())) - 
.put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Double), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Double), (c, dt) -> new ExprDoubleValue(c.doubleValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Text), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Text), (c, dt) -> new OpenSearchExprTextValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), (c, dt) -> new ExprStringValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Boolean), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Boolean), (c, dt) -> ExprBooleanValue.of(c.booleanValue())) - //Handles the creation of DATE, TIME & DATETIME + // Handles the creation of DATE, TIME & DATETIME .put(OpenSearchDateType.of(TIME), OpenSearchExprValueFactory::createOpenSearchDateType) .put(OpenSearchDateType.of(DATE), OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDateType.of(TIMESTAMP), - OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDateType.of(DATETIME), + .put( + OpenSearchDateType.of(TIMESTAMP), OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip), + .put( + OpenSearchDateType.of(DATETIME), OpenSearchExprValueFactory::createOpenSearchDateType) + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip), (c, dt) -> new OpenSearchExprIpValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint), - (c, dt) -> new OpenSearchExprGeoPointValue(c.geoValue().getLeft(), - c.geoValue().getRight())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint), + (c, dt) -> + new OpenSearchExprGeoPointValue(c.geoValue().getLeft(), c.geoValue().getRight())) + .put( + 
OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary), (c, dt) -> new OpenSearchExprBinaryValue(c.stringValue())) .build(); - /** - * Constructor of OpenSearchExprValueFactory. - */ + /** Constructor of OpenSearchExprValueFactory. */ public OpenSearchExprValueFactory(Map typeMapping) { this.typeMapping = OpenSearchDataType.traverseAndFlatten(typeMapping); } /** + * + * + *
    * The struct construction has the following assumption:
    *  1. The field has OpenSearch Object data type.
    *     See 
@@ -155,19 +163,23 @@ public OpenSearchExprValueFactory(Map typeMapping) {
    *     { "employ",       "STRUCT"  }
    *     { "employ.id",    "INTEGER" }
    *     { "employ.state", "STRING"  }
+   *  
*/ public ExprValue construct(String jsonString, boolean supportArrays) { try { - return parse(new OpenSearchJsonContent(OBJECT_MAPPER.readTree(jsonString)), TOP_PATH, - Optional.of(STRUCT), supportArrays); + return parse( + new OpenSearchJsonContent(OBJECT_MAPPER.readTree(jsonString)), + TOP_PATH, + Optional.of(STRUCT), + supportArrays); } catch (JsonProcessingException e) { throw new IllegalStateException(String.format("invalid json: %s.", jsonString), e); } } /** - * Construct ExprValue from field and its value object. Throw exception if trying - * to construct from field of unsupported type. + * Construct ExprValue from field and its value object. Throw exception if trying to construct + * from field of unsupported type.
* Todo, add IP, GeoPoint support after we have function implementation around it. * * @param field field name @@ -179,11 +191,7 @@ public ExprValue construct(String field, Object value, boolean supportArrays) { } private ExprValue parse( - Content content, - String field, - Optional fieldType, - boolean supportArrays - ) { + Content content, String field, Optional fieldType, boolean supportArrays) { if (content.isNull() || !fieldType.isPresent()) { return ExprNullValue.of(); } @@ -207,16 +215,16 @@ private ExprValue parse( } /** - * In OpenSearch, it is possible field doesn't have type definition in mapping. - * but has empty value. For example, {"empty_field": []}. + * In OpenSearch, it is possible field doesn't have type definition in mapping. but has empty + * value. For example, {"empty_field": []}. */ private Optional type(String field) { return Optional.ofNullable(typeMapping.get(field)); } /** - * Parse value with the first matching formatter into {@link ExprValue} - * with corresponding {@link ExprCoreType}. + * Parse value with the first matching formatter into {@link ExprValue} with corresponding {@link + * ExprCoreType}. 
* * @param value - time as string * @param dataType - field data type @@ -232,12 +240,12 @@ private static ExprValue parseDateTimeString(String value, OpenSearchDateType da TemporalAccessor accessor = formatter.parse(value); ZonedDateTime zonedDateTime = DateFormatters.from(accessor); switch (returnFormat) { - case TIME: return new ExprTimeValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalTime()); - case DATE: return new ExprDateValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalDate()); - default: return new ExprTimestampValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toInstant()); + case TIME: + return new ExprTimeValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalTime()); + case DATE: + return new ExprDateValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalDate()); + default: + return new ExprTimestampValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toInstant()); } } catch (IllegalArgumentException ignored) { // nothing to do, try another format @@ -247,19 +255,22 @@ private static ExprValue parseDateTimeString(String value, OpenSearchDateType da // if no formatters are available, try the default formatter try { switch (returnFormat) { - case TIME: return new ExprTimeValue( - DateFormatters.from(STRICT_HOUR_MINUTE_SECOND_FORMATTER.parse(value)).toLocalTime()); - case DATE: return new ExprDateValue( - DateFormatters.from(STRICT_YEAR_MONTH_DAY_FORMATTER.parse(value)).toLocalDate()); - default: return new ExprTimestampValue( - DateFormatters.from(DATE_TIME_FORMATTER.parse(value)).toInstant()); + case TIME: + return new ExprTimeValue( + DateFormatters.from(STRICT_HOUR_MINUTE_SECOND_FORMATTER.parse(value)).toLocalTime()); + case DATE: + return new ExprDateValue( + DateFormatters.from(STRICT_YEAR_MONTH_DAY_FORMATTER.parse(value)).toLocalDate()); + default: + return new ExprTimestampValue( + DateFormatters.from(DATE_TIME_FORMATTER.parse(value)).toInstant()); } } catch (DateTimeParseException ignored) { // ignored } - throw new 
IllegalArgumentException(String.format( - "Construct %s from \"%s\" failed, unsupported format.", returnFormat, value)); + throw new IllegalArgumentException( + String.format("Construct %s from \"%s\" failed, unsupported format.", returnFormat, value)); } private static ExprValue createOpenSearchDateType(Content value, ExprType type) { @@ -270,8 +281,8 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) var numFormatters = dt.getNumericNamedFormatters(); if (numFormatters.size() > 0 || !dt.hasFormats()) { long epochMillis = 0; - if (numFormatters.contains(DateFormatter.forPattern( - FormatNames.EPOCH_SECOND.getSnakeCaseName()))) { + if (numFormatters.contains( + DateFormatter.forPattern(FormatNames.EPOCH_SECOND.getSnakeCaseName()))) { // no CamelCase for `EPOCH_*` formats epochMillis = value.longValue() * 1000; } else /* EPOCH_MILLIS */ { @@ -279,9 +290,12 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) } Instant instant = Instant.ofEpochMilli(epochMillis); switch ((ExprCoreType) returnFormat) { - case TIME: return new ExprTimeValue(LocalTime.from(instant.atZone(UTC_ZONE_ID))); - case DATE: return new ExprDateValue(LocalDate.ofInstant(instant, UTC_ZONE_ID)); - default: return new ExprTimestampValue(instant); + case TIME: + return new ExprTimeValue(LocalTime.from(instant.atZone(UTC_ZONE_ID))); + case DATE: + return new ExprDateValue(LocalDate.ofInstant(instant, UTC_ZONE_ID)); + default: + return new ExprTimestampValue(instant); } } else { // custom format @@ -297,6 +311,7 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) /** * Parse struct content. + * * @param content Content to parse. * @param prefix Prefix for Level of object depth to parse. * @param supportArrays Parsing the whole array if array is type nested. 
@@ -304,15 +319,23 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) */ private ExprValue parseStruct(Content content, String prefix, boolean supportArrays) { LinkedHashMap result = new LinkedHashMap<>(); - content.map().forEachRemaining(entry -> result.put(entry.getKey(), - parse(entry.getValue(), - makeField(prefix, entry.getKey()), - type(makeField(prefix, entry.getKey())), supportArrays))); + content + .map() + .forEachRemaining( + entry -> + result.put( + entry.getKey(), + parse( + entry.getValue(), + makeField(prefix, entry.getKey()), + type(makeField(prefix, entry.getKey())), + supportArrays))); return new ExprTupleValue(result); } /** * Parse array content. Can also parse nested which isn't necessarily an array. + * * @param content Content to parse. * @param prefix Prefix for Level of object depth to parse. * @param type Type of content parsing. @@ -320,32 +343,31 @@ private ExprValue parseStruct(Content content, String prefix, boolean supportArr * @return Value parsed from content. */ private ExprValue parseArray( - Content content, - String prefix, - ExprType type, - boolean supportArrays - ) { + Content content, String prefix, ExprType type, boolean supportArrays) { List result = new ArrayList<>(); // ARRAY is mapped to nested but can take the json structure of an Object. if (content.objectValue() instanceof ObjectNode) { result.add(parseStruct(content, prefix, supportArrays)); // non-object type arrays are only supported when parsing inner_hits of OS response. 
- } else if ( - !(type instanceof OpenSearchDataType + } else if (!(type instanceof OpenSearchDataType && ((OpenSearchDataType) type).getExprType().equals(ARRAY)) && !supportArrays) { return parseInnerArrayValue(content.array().next(), prefix, type, supportArrays); } else { - content.array().forEachRemaining(v -> { - result.add(parseInnerArrayValue(v, prefix, type, supportArrays)); - }); + content + .array() + .forEachRemaining( + v -> { + result.add(parseInnerArrayValue(v, prefix, type, supportArrays)); + }); } return new ExprCollectionValue(result); } /** * Parse inner array value. Can be object type and recurse continues. + * * @param content Array index being parsed. * @param prefix Prefix for value. * @param type Type of inner array value. @@ -353,11 +375,7 @@ private ExprValue parseArray( * @return Inner array value. */ private ExprValue parseInnerArrayValue( - Content content, - String prefix, - ExprType type, - boolean supportArrays - ) { + Content content, String prefix, ExprType type, boolean supportArrays) { if (type instanceof OpenSearchIpType || type instanceof OpenSearchBinaryType || type instanceof OpenSearchDateType @@ -382,6 +400,7 @@ private ExprValue parseInnerArrayValue( /** * Make complete path string for field. + * * @param path Path of field. * @param field Field to append to path. * @return Field appended to path level. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java index 9c6fcdb825..dbe91dc398 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java @@ -18,9 +18,7 @@ import org.opensearch.sql.executor.execution.AbstractPlan; import org.opensearch.threadpool.ThreadPool; -/** - * QueryManager implemented in OpenSearch cluster. 
- */ +/** QueryManager implemented in OpenSearch cluster. */ @RequiredArgsConstructor public class OpenSearchQueryManager implements QueryManager { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java index 4c02affc5e..e3bc48ba72 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import java.io.IOException; @@ -19,36 +18,23 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; -/** - * A PhysicalPlan which will run the delegate plan in resource protection manner. - */ +/** A PhysicalPlan which will run the delegate plan in resource protection manner. */ @ToString @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class ResourceMonitorPlan extends PhysicalPlan implements SerializablePlan { - /** - * How many method calls to delegate's next() to perform resource check once. - */ + /** How many method calls to delegate's next() to perform resource check once. */ public static final long NUMBER_OF_NEXT_CALL_TO_CHECK = 1000; - /** - * Delegated PhysicalPlan. - */ + /** Delegated PhysicalPlan. */ private final PhysicalPlan delegate; - /** - * ResourceMonitor. - */ - @ToString.Exclude - private final ResourceMonitor monitor; - - /** - * Count how many calls to delegate's next() already. - */ - @EqualsAndHashCode.Exclude - private long nextCallCount = 0L; + /** ResourceMonitor. */ + @ToString.Exclude private final ResourceMonitor monitor; + /** Count how many calls to delegate's next() already. 
*/ + @EqualsAndHashCode.Exclude private long nextCallCount = 0L; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java index c0a4aeb0b7..4b7b6c5dcb 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import com.google.common.annotations.VisibleForTesting; @@ -11,9 +10,7 @@ import lombok.NoArgsConstructor; import lombok.extern.log4j.Log4j2; -/** - * OpenSearch Memory Monitor. - */ +/** OpenSearch Memory Monitor. */ @Log4j2 public class OpenSearchMemoryHealthy { private final RandomFail randomFail; @@ -25,16 +22,12 @@ public OpenSearchMemoryHealthy() { } @VisibleForTesting - public OpenSearchMemoryHealthy( - RandomFail randomFail, - MemoryUsage memoryUsage) { + public OpenSearchMemoryHealthy(RandomFail randomFail, MemoryUsage memoryUsage) { this.randomFail = randomFail; this.memoryUsage = memoryUsage; } - /** - * Is Memory Healthy. Calculate based on the current heap memory usage. - */ + /** Is Memory Healthy. Calculate based on the current heap memory usage. 
*/ public boolean isMemoryHealthy(long limitBytes) { final long memoryUsage = this.memoryUsage.usage(); log.debug("Memory usage:{}, limit:{}", memoryUsage, limitBytes); @@ -66,12 +59,8 @@ public long usage() { } @NoArgsConstructor - public static class MemoryUsageExceedFastFailureException extends RuntimeException { - - } + public static class MemoryUsageExceedFastFailureException extends RuntimeException {} @NoArgsConstructor - public static class MemoryUsageExceedException extends RuntimeException { - - } + public static class MemoryUsageExceedException extends RuntimeException {} } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java index 5ed82c7a5d..3990fef7b7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import io.github.resilience4j.core.IntervalFunction; @@ -17,7 +16,7 @@ /** * {@link ResourceMonitor} implementation on Elasticsearch. When the heap memory usage exceeds - * certain threshold, the monitor is not healthy. + * certain threshold, the monitor is not healthy.
* Todo, add metrics. */ @Log4j2 @@ -26,20 +25,15 @@ public class OpenSearchResourceMonitor extends ResourceMonitor { private final Retry retry; private final OpenSearchMemoryHealthy memoryMonitor; - /** - * Constructor of ElasticsearchCircuitBreaker. - */ - public OpenSearchResourceMonitor( - Settings settings, - OpenSearchMemoryHealthy memoryMonitor) { + /** Constructor. */ + public OpenSearchResourceMonitor(Settings settings, OpenSearchMemoryHealthy memoryMonitor) { this.settings = settings; RetryConfig config = RetryConfig.custom() .maxAttempts(3) .intervalFunction(IntervalFunction.ofExponentialRandomBackoff(1000)) .retryExceptions(OpenSearchMemoryHealthy.MemoryUsageExceedException.class) - .ignoreExceptions( - OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class) + .ignoreExceptions(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class) .build(); retry = Retry.of("mem", config); this.memoryMonitor = memoryMonitor; @@ -55,9 +49,7 @@ public boolean isHealthy() { try { ByteSizeValue limit = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); Supplier booleanSupplier = - Retry.decorateSupplier(retry, - () -> memoryMonitor - .isMemoryHealthy(limit.getBytes())); + Retry.decorateSupplier(retry, () -> memoryMonitor.isMemoryHealthy(limit.getBytes())); return booleanSupplier.get(); } catch (Exception e) { return false; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java index 919596eee2..6447a3ff65 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -33,49 +32,31 @@ @ToString public class OpenSearchQueryRequest 
implements OpenSearchRequest { - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final IndexName indexName; - /** - * Search request source builder. - */ + /** Search request source builder. */ private final SearchSourceBuilder sourceBuilder; - /** - * OpenSearchExprValueFactory. - */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + /** OpenSearchExprValueFactory. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; + /** List of includes expected in the response. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final List includes; - /** - * List of includes expected in the response. - */ - @EqualsAndHashCode.Exclude - @ToString.Exclude - private final List includes; - - /** - * Indicate the search already done. - */ + /** Indicate the search already done. */ private boolean searchDone = false; - /** - * Constructor of OpenSearchQueryRequest. - */ - public OpenSearchQueryRequest(String indexName, int size, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. */ + public OpenSearchQueryRequest( + String indexName, int size, OpenSearchExprValueFactory factory, List includes) { this(new IndexName(indexName), size, factory, includes); } - /** - * Constructor of OpenSearchQueryRequest. - */ - public OpenSearchQueryRequest(IndexName indexName, int size, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. */ + public OpenSearchQueryRequest( + IndexName indexName, int size, OpenSearchExprValueFactory factory, List includes) { this.indexName = indexName; this.sourceBuilder = new SearchSourceBuilder(); sourceBuilder.from(0); @@ -85,11 +66,12 @@ public OpenSearchQueryRequest(IndexName indexName, int size, this.includes = includes; } - /** - * Constructor of OpenSearchQueryRequest. 
- */ - public OpenSearchQueryRequest(IndexName indexName, SearchSourceBuilder sourceBuilder, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. */ + public OpenSearchQueryRequest( + IndexName indexName, + SearchSourceBuilder sourceBuilder, + OpenSearchExprValueFactory factory, + List includes) { this.indexName = indexName; this.sourceBuilder = sourceBuilder; this.exprValueFactory = factory; @@ -97,22 +79,24 @@ public OpenSearchQueryRequest(IndexName indexName, SearchSourceBuilder sourceBui } @Override - public OpenSearchResponse search(Function searchAction, - Function scrollAction) { + public OpenSearchResponse search( + Function searchAction, + Function scrollAction) { if (searchDone) { return new OpenSearchResponse(SearchHits.empty(), exprValueFactory, includes); } else { searchDone = true; return new OpenSearchResponse( - searchAction.apply(new SearchRequest() - .indices(indexName.getIndexNames()) - .source(sourceBuilder)), exprValueFactory, includes); + searchAction.apply( + new SearchRequest().indices(indexName.getIndexNames()).source(sourceBuilder)), + exprValueFactory, + includes); } } @Override public void clean(Consumer cleanAction) { - //do nothing. + // do nothing. 
} @Override @@ -122,7 +106,7 @@ public boolean hasAnotherBatch() { @Override public void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException("OpenSearchQueryRequest serialization " - + "is not implemented."); + throw new UnsupportedOperationException( + "OpenSearchQueryRequest serialization is not implemented."); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java index 5c9d0033c1..f775d55296 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -20,14 +19,10 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.response.OpenSearchResponse; -/** - * OpenSearch search request. - */ +/** OpenSearch search request. */ public interface OpenSearchRequest extends Writeable { - /** - * Default query timeout in minutes. - */ + /** Default query timeout in minutes. */ TimeValue DEFAULT_QUERY_TIMEOUT = TimeValue.timeValueMinutes(1L); /** @@ -37,8 +32,9 @@ public interface OpenSearchRequest extends Writeable { * @param scrollAction scroll search action. * @return OpenSearchResponse. */ - OpenSearchResponse search(Function searchAction, - Function scrollAction); + OpenSearchResponse search( + Function searchAction, + Function scrollAction); /** * Apply the cleanAction on request. @@ -49,21 +45,20 @@ OpenSearchResponse search(Function searchAction, /** * Get the OpenSearchExprValueFactory. + * * @return OpenSearchExprValueFactory. */ OpenSearchExprValueFactory getExprValueFactory(); /** * Check if there is more data to get from OpenSearch. 
- * @return True if calling {@ref OpenSearchClient.search} with this request will - * return non-empty response. + * + * @return True if calling {@ref OpenSearchClient.search} with this request will return non-empty + * response. */ boolean hasAnotherBatch(); - /** - * OpenSearch Index Name. - * Indices are separated by ",". - */ + /** OpenSearch Index Name. Indices are separated by ",". */ @EqualsAndHashCode class IndexName implements Writeable { private static final String COMMA = ","; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java index 80259f15d3..1df3dcb183 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static java.util.stream.Collectors.mapping; @@ -47,47 +46,36 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; -/** - * OpenSearch search request builder. - */ +/** OpenSearch search request builder. */ @EqualsAndHashCode @Getter @ToString public class OpenSearchRequestBuilder { - /** - * Search request source builder. - */ + /** Search request source builder. */ private final SearchSourceBuilder sourceBuilder; - /** - * Query size of the request -- how many rows will be returned. - */ + /** Query size of the request -- how many rows will be returned. */ private int requestedTotalSize; - /** - * Size of each page request to return. - */ + /** Size of each page request to return. */ private Integer pageSize = null; - /** - * OpenSearchExprValueFactory. 
- */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + /** OpenSearchExprValueFactory. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; + private int startFrom = 0; - /** - * Constructor. - */ - public OpenSearchRequestBuilder(int requestedTotalSize, - OpenSearchExprValueFactory exprValueFactory) { + /** Constructor. */ + public OpenSearchRequestBuilder( + int requestedTotalSize, OpenSearchExprValueFactory exprValueFactory) { this.requestedTotalSize = requestedTotalSize; - this.sourceBuilder = new SearchSourceBuilder() - .from(startFrom) - .timeout(OpenSearchRequest.DEFAULT_QUERY_TIMEOUT) - .trackScores(false); + this.sourceBuilder = + new SearchSourceBuilder() + .from(startFrom) + .timeout(OpenSearchRequest.DEFAULT_QUERY_TIMEOUT) + .trackScores(false); this.exprValueFactory = exprValueFactory; } @@ -96,13 +84,11 @@ public OpenSearchRequestBuilder(int requestedTotalSize, * * @return query request or scroll request */ - public OpenSearchRequest build(OpenSearchRequest.IndexName indexName, - int maxResultWindow, TimeValue scrollTimeout) { + public OpenSearchRequest build( + OpenSearchRequest.IndexName indexName, int maxResultWindow, TimeValue scrollTimeout) { int size = requestedTotalSize; FetchSourceContext fetchSource = this.sourceBuilder.fetchSource(); - List includes = fetchSource != null - ? Arrays.asList(fetchSource.includes()) - : List.of(); + List includes = fetchSource != null ? 
Arrays.asList(fetchSource.includes()) : List.of(); if (pageSize == null) { if (startFrom + size > maxResultWindow) { sourceBuilder.size(maxResultWindow - startFrom); @@ -118,12 +104,11 @@ public OpenSearchRequest build(OpenSearchRequest.IndexName indexName, throw new UnsupportedOperationException("Non-zero offset is not supported with pagination"); } sourceBuilder.size(pageSize); - return new OpenSearchScrollRequest(indexName, scrollTimeout, - sourceBuilder, exprValueFactory, includes); + return new OpenSearchScrollRequest( + indexName, scrollTimeout, sourceBuilder, exprValueFactory, includes); } } - boolean isBoolFilterQuery(QueryBuilder current) { return (current instanceof BoolQueryBuilder); } @@ -131,7 +116,7 @@ boolean isBoolFilterQuery(QueryBuilder current) { /** * Push down query to DSL request. * - * @param query query request + * @param query query request */ public void pushDownFilter(QueryBuilder query) { QueryBuilder current = sourceBuilder.query(); @@ -142,9 +127,7 @@ public void pushDownFilter(QueryBuilder query) { if (isBoolFilterQuery(current)) { ((BoolQueryBuilder) current).filter(query); } else { - sourceBuilder.query(QueryBuilders.boolQuery() - .filter(current) - .filter(query)); + sourceBuilder.query(QueryBuilders.boolQuery().filter(current).filter(query)); } } @@ -181,9 +164,7 @@ public void pushDownSort(List> sortBuilders) { } } - /** - * Pushdown size (limit) and from (offset) to DSL request. - */ + /** Pushdown size (limit) and from (offset) to DSL request. */ public void pushDownLimit(Integer limit, Integer offset) { requestedTotalSize = limit; startFrom = offset; @@ -200,6 +181,7 @@ public void pushDownPageSize(int pageSize) { /** * Add highlight to DSL requests. 
+ * * @param field name of the field to highlight */ public void pushDownHighlight(String field, Map arguments) { @@ -208,32 +190,34 @@ public void pushDownHighlight(String field, Map arguments) { // OS does not allow duplicates of highlight fields if (sourceBuilder.highlighter().fields().stream() .anyMatch(f -> f.name().equals(unquotedField))) { - throw new SemanticCheckException(String.format( - "Duplicate field %s in highlight", field)); + throw new SemanticCheckException(String.format("Duplicate field %s in highlight", field)); } sourceBuilder.highlighter().field(unquotedField); } else { - HighlightBuilder highlightBuilder = - new HighlightBuilder().field(unquotedField); + HighlightBuilder highlightBuilder = new HighlightBuilder().field(unquotedField); sourceBuilder.highlighter(highlightBuilder); } // lastFieldIndex denotes previously set highlighter with field parameter int lastFieldIndex = sourceBuilder.highlighter().fields().size() - 1; if (arguments.containsKey("pre_tags")) { - sourceBuilder.highlighter().fields().get(lastFieldIndex) + sourceBuilder + .highlighter() + .fields() + .get(lastFieldIndex) .preTags(arguments.get("pre_tags").toString()); } if (arguments.containsKey("post_tags")) { - sourceBuilder.highlighter().fields().get(lastFieldIndex) + sourceBuilder + .highlighter() + .fields() + .get(lastFieldIndex) .postTags(arguments.get("post_tags").toString()); } } - /** - * Push down project list to DSL requests. - */ + /** Push down project list to DSL requests. */ public void pushDownProjects(Set projects) { sourceBuilder.fetchSource( projects.stream().map(ReferenceExpression::getAttr).distinct().toArray(String[]::new), @@ -254,21 +238,22 @@ private boolean isSortByDocOnly() { /** * Push down nested to sourceBuilder. + * * @param nestedArgs : Nested arguments to push down. 
*/ public void pushDownNested(List> nestedArgs) { initBoolQueryFilter(); List nestedQueries = extractNestedQueries(query()); - groupFieldNamesByPath(nestedArgs).forEach( - (path, fieldNames) -> - buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path)) - ); + groupFieldNamesByPath(nestedArgs) + .forEach( + (path, fieldNames) -> + buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path))); } /** - * InnerHit must be added to the NestedQueryBuilder. We need to extract - * the nested queries currently in the query if there is already a filter - * push down with nested query. + * InnerHit must be added to the NestedQueryBuilder. We need to extract the nested queries + * currently in the query if there is already a filter push down with nested query. + * * @param query : current query. * @return : grouped nested queries currently in query. */ @@ -289,9 +274,7 @@ public int getMaxResponseSize() { return pageSize == null ? requestedTotalSize : pageSize; } - /** - * Initialize bool query for push down. - */ + /** Initialize bool query for push down. */ private void initBoolQueryFilter() { if (sourceBuilder.query() == null) { sourceBuilder.query(QueryBuilders.boolQuery()); @@ -304,44 +287,42 @@ private void initBoolQueryFilter() { /** * Map all field names in nested queries that use same path. + * * @param fields : Fields for nested queries. * @return : Map of path and associated field names. */ private Map> groupFieldNamesByPath( List> fields) { // TODO filter out reverse nested when supported - .filter(not(isReverseNested())) - return fields.stream().collect( - Collectors.groupingBy( - m -> m.get("path").toString(), - mapping( - m -> m.get("field").toString(), - toList() - ) - ) - ); + return fields.stream() + .collect( + Collectors.groupingBy( + m -> m.get("path").toString(), mapping(m -> m.get("field").toString(), toList()))); } /** * Build inner hits portion to nested query. 
+ * * @param paths : Set of all paths used in nested queries. * @param query : Current pushDown query. */ private void buildInnerHit(List paths, NestedQueryBuilder query) { - query.innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, paths.toArray(new String[0]), null) - )); + query.innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, paths.toArray(new String[0]), null))); } /** - * We need to group nested queries with same path for adding new fields with same path of - * inner hits. If we try to add additional inner hits with same path we get an OS error. + * We need to group nested queries with same path for adding new fields with same path of inner + * hits. If we try to add additional inner hits with same path we get an OS error. + * * @param nestedQueries Current list of nested queries in query. * @param path path comparing with current nested queries. * @return Query with same path or new empty nested query. */ private NestedQueryBuilder findNestedQueryWithSamePath( - List nestedQueries, String path - ) { + List nestedQueries, String path) { return nestedQueries.stream() .filter(query -> isSamePath(path, query)) .findAny() @@ -350,6 +331,7 @@ private NestedQueryBuilder findNestedQueryWithSamePath( /** * Check if is nested query is of the same path value. + * * @param path Value of path to compare with nested query. * @param query nested query builder to compare with path. * @return true if nested query has same path. @@ -358,9 +340,7 @@ private boolean isSamePath(String path, NestedQueryBuilder query) { return nestedQuery(path, query.query(), query.scoreMode()).equals(query); } - /** - * Create a nested query with match all filter to place inner hits. - */ + /** Create a nested query with match all filter to place inner hits. 
*/ private Supplier createEmptyNestedQuery(String path) { return () -> { NestedQueryBuilder nestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); @@ -371,6 +351,7 @@ private Supplier createEmptyNestedQuery(String path) { /** * Return current query. + * * @return : Current source builder query. */ private BoolQueryBuilder query() { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java index 34e8fcd096..c9490f0767 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -41,62 +40,56 @@ public class OpenSearchScrollRequest implements OpenSearchRequest { /** * Search request used to initiate paged (scrolled) search. Not needed to get subsequent pages. */ - @EqualsAndHashCode.Exclude - private final transient SearchRequest initialSearchRequest; + @EqualsAndHashCode.Exclude private final transient SearchRequest initialSearchRequest; + /** Scroll context timeout. */ private final TimeValue scrollTimeout; - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final IndexName indexName; /** Index name. */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; /** * Scroll id which is set after first request issued. Because OpenSearchClient is shared by * multiple threads so this state has to be maintained here. 
*/ - @Setter - @Getter - private String scrollId = NO_SCROLL_ID; + @Setter @Getter private String scrollId = NO_SCROLL_ID; public static final String NO_SCROLL_ID = ""; - @EqualsAndHashCode.Exclude - private boolean needClean = true; + @EqualsAndHashCode.Exclude private boolean needClean = true; - @Getter - @EqualsAndHashCode.Exclude - @ToString.Exclude - private final List includes; + @Getter @EqualsAndHashCode.Exclude @ToString.Exclude private final List includes; /** Constructor. */ - public OpenSearchScrollRequest(IndexName indexName, - TimeValue scrollTimeout, - SearchSourceBuilder sourceBuilder, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + public OpenSearchScrollRequest( + IndexName indexName, + TimeValue scrollTimeout, + SearchSourceBuilder sourceBuilder, + OpenSearchExprValueFactory exprValueFactory, + List includes) { this.indexName = indexName; this.scrollTimeout = scrollTimeout; this.exprValueFactory = exprValueFactory; - this.initialSearchRequest = new SearchRequest() - .indices(indexName.getIndexNames()) - .scroll(scrollTimeout) - .source(sourceBuilder); + this.initialSearchRequest = + new SearchRequest() + .indices(indexName.getIndexNames()) + .scroll(scrollTimeout) + .source(sourceBuilder); this.includes = includes; } - - /** Executes request using either {@param searchAction} or {@param scrollAction} as appropriate. + /** + * Executes request using either {@param searchAction} or {@param scrollAction} as appropriate. */ @Override - public OpenSearchResponse search(Function searchAction, - Function scrollAction) { + public OpenSearchResponse search( + Function searchAction, + Function scrollAction) { SearchResponse openSearchResponse; if (isScroll()) { openSearchResponse = scrollAction.apply(scrollRequest()); @@ -172,6 +165,7 @@ public void writeTo(StreamOutput out) throws IOException { /** * Constructs OpenSearchScrollRequest from serialized representation. + * * @param in stream to read data from. 
* @param engine OpenSearchSqlEngine to get node-specific context. * @throws IOException thrown if reading from input {@code in} fails. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java index a2fbf79624..2969c7639b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import java.util.List; import org.opensearch.sql.data.model.ExprValue; -/** - * OpenSearch system request query against the system index. - */ +/** OpenSearch system request query against the system index. */ public interface OpenSearchSystemRequest { /** diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java index 03abfbf6c1..e43777a740 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import static org.opensearch.sql.opensearch.storage.OpenSearchIndex.METADATAFIELD_TYPE_MAP; @@ -35,52 +34,37 @@ import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; -/** - * OpenSearch search response. - */ +/** OpenSearch search response. */ @EqualsAndHashCode @ToString public class OpenSearchResponse implements Iterable { - /** - * Search query result (non-aggregation). - */ + /** Search query result (non-aggregation). 
*/ private final SearchHits hits; - /** - * Search aggregation result. - */ + /** Search aggregation result. */ private final Aggregations aggregations; - /** - * List of requested include fields. - */ + /** List of requested include fields. */ private final List includes; - /** - * OpenSearchExprValueFactory used to build ExprValue from search result. - */ - @EqualsAndHashCode.Exclude - private final OpenSearchExprValueFactory exprValueFactory; + /** OpenSearchExprValueFactory used to build ExprValue from search result. */ + @EqualsAndHashCode.Exclude private final OpenSearchExprValueFactory exprValueFactory; - /** - * Constructor of OpenSearchResponse. - */ - public OpenSearchResponse(SearchResponse searchResponse, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + /** Constructor of OpenSearchResponse. */ + public OpenSearchResponse( + SearchResponse searchResponse, + OpenSearchExprValueFactory exprValueFactory, + List includes) { this.hits = searchResponse.getHits(); this.aggregations = searchResponse.getAggregations(); this.exprValueFactory = exprValueFactory; this.includes = includes; } - /** - * Constructor of OpenSearchResponse with SearchHits. - */ - public OpenSearchResponse(SearchHits hits, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + /** Constructor of OpenSearchResponse with SearchHits. 
*/ + public OpenSearchResponse( + SearchHits hits, OpenSearchExprValueFactory exprValueFactory, List includes) { this.hits = hits; this.aggregations = null; this.exprValueFactory = exprValueFactory; @@ -111,48 +95,52 @@ public Iterator iterator() { return handleAggregationResponse(); } else { return Arrays.stream(hits.getHits()) - .map(hit -> { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - addParsedHitsToBuilder(builder, hit); - addMetaDataFieldsToBuilder(builder, hit); - addHighlightsToBuilder(builder, hit); - return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); - }).iterator(); + .map( + hit -> { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + addParsedHitsToBuilder(builder, hit); + addMetaDataFieldsToBuilder(builder, hit); + addHighlightsToBuilder(builder, hit); + return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); + }) + .iterator(); } } /** - * Parse response for all hits to add to builder. Inner_hits supports arrays of objects - * with nested type. + * Parse response for all hits to add to builder. Inner_hits supports arrays of objects with + * nested type. + * * @param builder builder to build values from response. * @param hit Search hit from response. */ private void addParsedHitsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { + ImmutableMap.Builder builder, SearchHit hit) { builder.putAll( - exprValueFactory.construct( - hit.getSourceAsString(), - !(hit.getInnerHits() == null || hit.getInnerHits().isEmpty()) - ).tupleValue()); + exprValueFactory + .construct( + hit.getSourceAsString(), + !(hit.getInnerHits() == null || hit.getInnerHits().isEmpty())) + .tupleValue()); } /** * If highlight fields are present in response add the fields to the builder. + * * @param builder builder to build values from response. * @param hit Search hit from response. 
*/ private void addHighlightsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { + ImmutableMap.Builder builder, SearchHit hit) { if (!hit.getHighlightFields().isEmpty()) { var hlBuilder = ImmutableMap.builder(); for (var es : hit.getHighlightFields().entrySet()) { - hlBuilder.put(es.getKey(), ExprValueUtils.collectionValue( - Arrays.stream(es.getValue().fragments()).map( - Text::toString).collect(Collectors.toList()))); + hlBuilder.put( + es.getKey(), + ExprValueUtils.collectionValue( + Arrays.stream(es.getValue().fragments()) + .map(Text::toString) + .collect(Collectors.toList()))); } builder.put("_highlight", ExprTupleValue.fromExprValueMap(hlBuilder.build())); } @@ -160,58 +148,56 @@ private void addHighlightsToBuilder( /** * Add metadata fields to builder from response. + * * @param builder builder to build values from response. * @param hit Search hit from response. */ private void addMetaDataFieldsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { - List metaDataFieldSet = includes.stream() - .filter(METADATAFIELD_TYPE_MAP::containsKey) - .collect(Collectors.toList()); - ExprFloatValue maxScore = Float.isNaN(hits.getMaxScore()) - ? 
null : new ExprFloatValue(hits.getMaxScore()); - - metaDataFieldSet.forEach(metaDataField -> { - if (metaDataField.equals(METADATA_FIELD_INDEX)) { - builder.put(METADATA_FIELD_INDEX, new ExprStringValue(hit.getIndex())); - } else if (metaDataField.equals(METADATA_FIELD_ID)) { - builder.put(METADATA_FIELD_ID, new ExprStringValue(hit.getId())); - } else if (metaDataField.equals(METADATA_FIELD_SCORE)) { - if (!Float.isNaN(hit.getScore())) { - builder.put(METADATA_FIELD_SCORE, new ExprFloatValue(hit.getScore())); - } - } else if (metaDataField.equals(METADATA_FIELD_MAXSCORE)) { - if (maxScore != null) { - builder.put(METADATA_FIELD_MAXSCORE, maxScore); - } - } else if (metaDataField.equals(METADATA_FIELD_SORT)) { - builder.put(METADATA_FIELD_SORT, new ExprLongValue(hit.getSeqNo())); - } else { // if (metaDataField.equals(METADATA_FIELD_ROUTING)){ - builder.put(METADATA_FIELD_ROUTING, new ExprStringValue(hit.getShard().toString())); - } - }); + ImmutableMap.Builder builder, SearchHit hit) { + List metaDataFieldSet = + includes.stream().filter(METADATAFIELD_TYPE_MAP::containsKey).collect(Collectors.toList()); + ExprFloatValue maxScore = + Float.isNaN(hits.getMaxScore()) ? 
null : new ExprFloatValue(hits.getMaxScore()); + + metaDataFieldSet.forEach( + metaDataField -> { + if (metaDataField.equals(METADATA_FIELD_INDEX)) { + builder.put(METADATA_FIELD_INDEX, new ExprStringValue(hit.getIndex())); + } else if (metaDataField.equals(METADATA_FIELD_ID)) { + builder.put(METADATA_FIELD_ID, new ExprStringValue(hit.getId())); + } else if (metaDataField.equals(METADATA_FIELD_SCORE)) { + if (!Float.isNaN(hit.getScore())) { + builder.put(METADATA_FIELD_SCORE, new ExprFloatValue(hit.getScore())); + } + } else if (metaDataField.equals(METADATA_FIELD_MAXSCORE)) { + if (maxScore != null) { + builder.put(METADATA_FIELD_MAXSCORE, maxScore); + } + } else if (metaDataField.equals(METADATA_FIELD_SORT)) { + builder.put(METADATA_FIELD_SORT, new ExprLongValue(hit.getSeqNo())); + } else { // if (metaDataField.equals(METADATA_FIELD_ROUTING)){ + builder.put(METADATA_FIELD_ROUTING, new ExprStringValue(hit.getShard().toString())); + } + }); } /** * Handle an aggregation response. + * * @return Parsed and built return values from response. 
*/ private Iterator handleAggregationResponse() { - return exprValueFactory.getParser().parse(aggregations).stream().map(entry -> { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (Map.Entry value : entry.entrySet()) { - builder.put( - value.getKey(), - exprValueFactory.construct( - value.getKey(), - value.getValue(), - false - ) - ); - } - return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); - }).iterator(); + return exprValueFactory.getParser().parse(aggregations).stream() + .map( + entry -> { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + for (Map.Entry value : entry.entrySet()) { + builder.put( + value.getKey(), + exprValueFactory.construct(value.getKey(), value.getValue(), false)); + } + return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); + }) + .iterator(); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java index 384e07ad8f..1492fedfc2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java @@ -23,9 +23,7 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.NumericMetricsAggregation; -/** - * {@link NumericMetricsAggregation.SingleValue} metric parser. - */ +/** {@link NumericMetricsAggregation.SingleValue} metric parser. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class SingleValueParser implements MetricParser { @@ -35,7 +33,6 @@ public class SingleValueParser implements MetricParser { @Override public Map parse(Aggregation agg) { return Collections.singletonMap( - agg.getName(), - handleNanInfValue(((NumericMetricsAggregation.SingleValue) agg).value())); + agg.getName(), handleNanInfValue(((NumericMetricsAggregation.SingleValue) agg).value())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java index c80b75de05..82a2f8648f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java @@ -24,9 +24,7 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.ExtendedStats; -/** - * {@link ExtendedStats} metric parser. - */ +/** {@link ExtendedStats} metric parser. */ @EqualsAndHashCode @RequiredArgsConstructor public class StatsParser implements MetricParser { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java index a98e1b4ce3..b29b44f033 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.agg; import java.util.Arrays; @@ -16,21 +15,19 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.TopHits; -/** - * {@link TopHits} metric parser. - */ +/** {@link TopHits} metric parser. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class TopHitsParser implements MetricParser { - @Getter - private final String name; + @Getter private final String name; @Override public Map parse(Aggregation agg) { return Collections.singletonMap( agg.getName(), Arrays.stream(((TopHits) agg).getHits().getHits()) - .flatMap(h -> h.getSourceAsMap().values().stream()).collect(Collectors.toList())); + .flatMap(h -> h.getSourceAsMap().values().stream()) + .collect(Collectors.toList())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java index 953f4d19b4..9ce46c6de6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java @@ -19,6 +19,7 @@ public class Utils { /** * Utils to handle Nan/Infinite Value. + * * @return null if is Nan or is +-Infinity. */ public static Object handleNanInfValue(double value) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java index 0c1b2e58b1..95c52ea275 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.security; import java.security.AccessController; @@ -17,9 +16,7 @@ */ public class SecurityAccess { - /** - * Execute the operation in privileged mode. - */ + /** Execute the operation in privileged mode. 
*/ public static T doPrivileged(final PrivilegedExceptionAction operation) { SpecialPermission.check(); try { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java index 0810312974..133903dabe 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.setting; import static org.opensearch.common.settings.Settings.EMPTY; @@ -27,129 +26,172 @@ import org.opensearch.sql.common.setting.LegacySettings; import org.opensearch.sql.common.setting.Settings; -/** - * Setting implementation on OpenSearch. - */ +/** Setting implementation on OpenSearch. */ @Log4j2 public class OpenSearchSettings extends Settings { - /** - * Default settings. - */ + /** Default settings. */ private final Map> defaultSettings; - /** - * Latest setting value for each registered key. Thread-safe is required. - */ + + /** Latest setting value for each registered key. Thread-safe is required. 
*/ @VisibleForTesting private final Map latestSettings = new ConcurrentHashMap<>(); - public static final Setting SQL_ENABLED_SETTING = Setting.boolSetting( - Key.SQL_ENABLED.getKeyValue(), - LegacyOpenDistroSettings.SQL_ENABLED_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_SLOWLOG_SETTING = Setting.intSetting( - Key.SQL_SLOWLOG.getKeyValue(), - LegacyOpenDistroSettings.SQL_QUERY_SLOWLOG_SETTING, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_CURSOR_KEEP_ALIVE_SETTING = Setting.positiveTimeSetting( - Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), - LegacyOpenDistroSettings.SQL_CURSOR_KEEPALIVE_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_DELETE_ENABLED_SETTING = Setting.boolSetting( - Key.SQL_DELETE_ENABLED.getKeyValue(), - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting PPL_ENABLED_SETTING = Setting.boolSetting( - Key.PPL_ENABLED.getKeyValue(), - LegacyOpenDistroSettings.PPL_ENABLED_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting QUERY_MEMORY_LIMIT_SETTING = new Setting<>( - Key.QUERY_MEMORY_LIMIT.getKeyValue(), - LegacyOpenDistroSettings.PPL_QUERY_MEMORY_LIMIT_SETTING, - (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio( - s, LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue()), - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting QUERY_SIZE_LIMIT_SETTING = Setting.intSetting( - Key.QUERY_SIZE_LIMIT.getKeyValue(), - LegacyOpenDistroSettings.QUERY_SIZE_LIMIT_SETTING, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting METRICS_ROLLING_WINDOW_SETTING = Setting.longSetting( - Key.METRICS_ROLLING_WINDOW.getKeyValue(), - LegacyOpenDistroSettings.METRICS_ROLLING_WINDOW_SETTING, - 2L, - Setting.Property.NodeScope, - 
Setting.Property.Dynamic); - - public static final Setting METRICS_ROLLING_INTERVAL_SETTING = Setting.longSetting( - Key.METRICS_ROLLING_INTERVAL.getKeyValue(), - LegacyOpenDistroSettings.METRICS_ROLLING_INTERVAL_SETTING, - 1L, - Setting.Property.NodeScope, - Setting.Property.Dynamic); + public static final Setting SQL_ENABLED_SETTING = + Setting.boolSetting( + Key.SQL_ENABLED.getKeyValue(), + LegacyOpenDistroSettings.SQL_ENABLED_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_SLOWLOG_SETTING = + Setting.intSetting( + Key.SQL_SLOWLOG.getKeyValue(), + LegacyOpenDistroSettings.SQL_QUERY_SLOWLOG_SETTING, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_CURSOR_KEEP_ALIVE_SETTING = + Setting.positiveTimeSetting( + Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), + LegacyOpenDistroSettings.SQL_CURSOR_KEEPALIVE_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_DELETE_ENABLED_SETTING = + Setting.boolSetting( + Key.SQL_DELETE_ENABLED.getKeyValue(), + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting PPL_ENABLED_SETTING = + Setting.boolSetting( + Key.PPL_ENABLED.getKeyValue(), + LegacyOpenDistroSettings.PPL_ENABLED_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting QUERY_MEMORY_LIMIT_SETTING = + new Setting<>( + Key.QUERY_MEMORY_LIMIT.getKeyValue(), + LegacyOpenDistroSettings.PPL_QUERY_MEMORY_LIMIT_SETTING, + (s) -> + MemorySizeValue.parseBytesSizeValueOrHeapRatio( + s, LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue()), + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting QUERY_SIZE_LIMIT_SETTING = + Setting.intSetting( + Key.QUERY_SIZE_LIMIT.getKeyValue(), + LegacyOpenDistroSettings.QUERY_SIZE_LIMIT_SETTING, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public 
static final Setting METRICS_ROLLING_WINDOW_SETTING = + Setting.longSetting( + Key.METRICS_ROLLING_WINDOW.getKeyValue(), + LegacyOpenDistroSettings.METRICS_ROLLING_WINDOW_SETTING, + 2L, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting METRICS_ROLLING_INTERVAL_SETTING = + Setting.longSetting( + Key.METRICS_ROLLING_INTERVAL.getKeyValue(), + LegacyOpenDistroSettings.METRICS_ROLLING_INTERVAL_SETTING, + 1L, + Setting.Property.NodeScope, + Setting.Property.Dynamic); // we are keeping this to not break upgrades if the config is already present. // This will be completely removed in 3.0. - public static final Setting DATASOURCE_CONFIG = SecureSetting.secureFile( - "plugins.query.federation.datasources.config", - null, - Setting.Property.Deprecated); - - public static final Setting DATASOURCE_MASTER_SECRET_KEY = Setting.simpleString( - ENCYRPTION_MASTER_KEY.getKeyValue(), - Setting.Property.NodeScope, - Setting.Property.Final, - Setting.Property.Filtered); - - public static final Setting DATASOURCE_URI_ALLOW_HOSTS = Setting.simpleString( - Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue(), - ".*", - Setting.Property.NodeScope, - Setting.Property.Dynamic); + public static final Setting DATASOURCE_CONFIG = + SecureSetting.secureFile( + "plugins.query.federation.datasources.config", null, Setting.Property.Deprecated); - /** - * Construct OpenSearchSetting. - * The OpenSearchSetting must be singleton. - */ + public static final Setting DATASOURCE_MASTER_SECRET_KEY = + Setting.simpleString( + ENCYRPTION_MASTER_KEY.getKeyValue(), + Setting.Property.NodeScope, + Setting.Property.Final, + Setting.Property.Filtered); + + public static final Setting DATASOURCE_URI_ALLOW_HOSTS = + Setting.simpleString( + Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue(), + ".*", + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + /** Construct OpenSearchSetting. The OpenSearchSetting must be singleton. 
*/ @SuppressWarnings("unchecked") public OpenSearchSettings(ClusterSettings clusterSettings) { ImmutableMap.Builder> settingBuilder = new ImmutableMap.Builder<>(); - register(settingBuilder, clusterSettings, Key.SQL_ENABLED, - SQL_ENABLED_SETTING, new Updater(Key.SQL_ENABLED)); - register(settingBuilder, clusterSettings, Key.SQL_SLOWLOG, - SQL_SLOWLOG_SETTING, new Updater(Key.SQL_SLOWLOG)); - register(settingBuilder, clusterSettings, Key.SQL_CURSOR_KEEP_ALIVE, - SQL_CURSOR_KEEP_ALIVE_SETTING, new Updater(Key.SQL_CURSOR_KEEP_ALIVE)); - register(settingBuilder, clusterSettings, Key.SQL_DELETE_ENABLED, - SQL_DELETE_ENABLED_SETTING, new Updater(Key.SQL_DELETE_ENABLED)); - register(settingBuilder, clusterSettings, Key.PPL_ENABLED, - PPL_ENABLED_SETTING, new Updater(Key.PPL_ENABLED)); - register(settingBuilder, clusterSettings, Key.QUERY_MEMORY_LIMIT, - QUERY_MEMORY_LIMIT_SETTING, new Updater(Key.QUERY_MEMORY_LIMIT)); - register(settingBuilder, clusterSettings, Key.QUERY_SIZE_LIMIT, - QUERY_SIZE_LIMIT_SETTING, new Updater(Key.QUERY_SIZE_LIMIT)); - register(settingBuilder, clusterSettings, Key.METRICS_ROLLING_WINDOW, - METRICS_ROLLING_WINDOW_SETTING, new Updater(Key.METRICS_ROLLING_WINDOW)); - register(settingBuilder, clusterSettings, Key.METRICS_ROLLING_INTERVAL, - METRICS_ROLLING_INTERVAL_SETTING, new Updater(Key.METRICS_ROLLING_INTERVAL)); - register(settingBuilder, clusterSettings, Key.DATASOURCES_URI_ALLOWHOSTS, - DATASOURCE_URI_ALLOW_HOSTS, new Updater(Key.DATASOURCES_URI_ALLOWHOSTS)); - registerNonDynamicSettings(settingBuilder, clusterSettings, Key.CLUSTER_NAME, - ClusterName.CLUSTER_NAME_SETTING); + register( + settingBuilder, + clusterSettings, + Key.SQL_ENABLED, + SQL_ENABLED_SETTING, + new Updater(Key.SQL_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.SQL_SLOWLOG, + SQL_SLOWLOG_SETTING, + new Updater(Key.SQL_SLOWLOG)); + register( + settingBuilder, + clusterSettings, + Key.SQL_CURSOR_KEEP_ALIVE, + SQL_CURSOR_KEEP_ALIVE_SETTING, + new 
Updater(Key.SQL_CURSOR_KEEP_ALIVE)); + register( + settingBuilder, + clusterSettings, + Key.SQL_DELETE_ENABLED, + SQL_DELETE_ENABLED_SETTING, + new Updater(Key.SQL_DELETE_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.PPL_ENABLED, + PPL_ENABLED_SETTING, + new Updater(Key.PPL_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.QUERY_MEMORY_LIMIT, + QUERY_MEMORY_LIMIT_SETTING, + new Updater(Key.QUERY_MEMORY_LIMIT)); + register( + settingBuilder, + clusterSettings, + Key.QUERY_SIZE_LIMIT, + QUERY_SIZE_LIMIT_SETTING, + new Updater(Key.QUERY_SIZE_LIMIT)); + register( + settingBuilder, + clusterSettings, + Key.METRICS_ROLLING_WINDOW, + METRICS_ROLLING_WINDOW_SETTING, + new Updater(Key.METRICS_ROLLING_WINDOW)); + register( + settingBuilder, + clusterSettings, + Key.METRICS_ROLLING_INTERVAL, + METRICS_ROLLING_INTERVAL_SETTING, + new Updater(Key.METRICS_ROLLING_INTERVAL)); + register( + settingBuilder, + clusterSettings, + Key.DATASOURCES_URI_ALLOWHOSTS, + DATASOURCE_URI_ALLOW_HOSTS, + new Updater(Key.DATASOURCES_URI_ALLOWHOSTS)); + registerNonDynamicSettings( + settingBuilder, clusterSettings, Key.CLUSTER_NAME, ClusterName.CLUSTER_NAME_SETTING); defaultSettings = settingBuilder.build(); } @@ -159,36 +201,33 @@ public T getSettingValue(Settings.Key key) { return (T) latestSettings.getOrDefault(key, defaultSettings.get(key).getDefault(EMPTY)); } - /** - * Register the pair of {key, setting}. - */ - private void register(ImmutableMap.Builder> settingBuilder, - ClusterSettings clusterSettings, Settings.Key key, - Setting setting, - Consumer updater) { + /** Register the pair of {key, setting}. 
*/ + private void register( + ImmutableMap.Builder> settingBuilder, + ClusterSettings clusterSettings, + Settings.Key key, + Setting setting, + Consumer updater) { if (clusterSettings.get(setting) != null) { latestSettings.put(key, clusterSettings.get(setting)); } settingBuilder.put(key, setting); - clusterSettings - .addSettingsUpdateConsumer(setting, updater); + clusterSettings.addSettingsUpdateConsumer(setting, updater); } - /** - * Register Non Dynamic Settings without consumer. - */ + /** Register Non Dynamic Settings without consumer. */ private void registerNonDynamicSettings( ImmutableMap.Builder> settingBuilder, - ClusterSettings clusterSettings, Settings.Key key, + ClusterSettings clusterSettings, + Settings.Key key, Setting setting) { settingBuilder.put(key, setting); latestSettings.put(key, clusterSettings.get(setting)); } - /** - * Add the inner class only for UT coverage purpose. - * Lambda could be much elegant solution. But which is hard to test. + * Add the inner class only for UT coverage purpose. Lambda could be much elegant solution. But + * which is hard to test. */ @VisibleForTesting @RequiredArgsConstructor @@ -202,9 +241,7 @@ public void accept(Object newValue) { } } - /** - * Used by Plugin to init Setting. - */ + /** Used by Plugin to init Setting. */ public static List> pluginSettings() { return new ImmutableList.Builder>() .add(SQL_ENABLED_SETTING) @@ -220,9 +257,7 @@ public static List> pluginSettings() { .build(); } - /** - * Init Non Dynamic Plugin Settings. - */ + /** Init Non Dynamic Plugin Settings. */ public static List> pluginNonDynamicSettings() { return new ImmutableList.Builder>() .add(DATASOURCE_MASTER_SECRET_KEY) @@ -230,9 +265,7 @@ public static List> pluginNonDynamicSettings() { .build(); } - /** - * Used by local cluster to get settings from a setting instance. - */ + /** Used by local cluster to get settings from a setting instance. 
*/ public List> getSettings() { return pluginSettings(); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java index 62617f744e..c6afdb8511 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import com.google.common.annotations.VisibleForTesting; @@ -47,43 +46,33 @@ public class OpenSearchIndex implements Table { public static final String METADATA_FIELD_ROUTING = "_routing"; - public static final java.util.Map METADATAFIELD_TYPE_MAP = Map.of( - METADATA_FIELD_ID, ExprCoreType.STRING, - METADATA_FIELD_INDEX, ExprCoreType.STRING, - METADATA_FIELD_SCORE, ExprCoreType.FLOAT, - METADATA_FIELD_MAXSCORE, ExprCoreType.FLOAT, - METADATA_FIELD_SORT, ExprCoreType.LONG, - METADATA_FIELD_ROUTING, ExprCoreType.STRING - ); + public static final java.util.Map METADATAFIELD_TYPE_MAP = + Map.of( + METADATA_FIELD_ID, ExprCoreType.STRING, + METADATA_FIELD_INDEX, ExprCoreType.STRING, + METADATA_FIELD_SCORE, ExprCoreType.FLOAT, + METADATA_FIELD_MAXSCORE, ExprCoreType.FLOAT, + METADATA_FIELD_SORT, ExprCoreType.LONG, + METADATA_FIELD_ROUTING, ExprCoreType.STRING); /** OpenSearch client connection. */ private final OpenSearchClient client; private final Settings settings; - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final OpenSearchRequest.IndexName indexName; - /** - * The cached mapping of field and type in index. - */ + /** The cached mapping of field and type in index. */ private Map cachedFieldOpenSearchTypes = null; - /** - * The cached ExprType of fields. - */ + /** The cached ExprType of fields. 
*/ private Map cachedFieldTypes = null; - /** - * The cached max result window setting of index. - */ + /** The cached max result window setting of index. */ private Integer cachedMaxResultWindow = null; - /** - * Constructor. - */ + /** Constructor. */ public OpenSearchIndex(OpenSearchClient client, Settings settings, String indexName) { this.client = client; this.settings = settings; @@ -113,22 +102,24 @@ public void create(Map schema) { * or lazy evaluate when query engine pulls field type. */ /** - * Get simplified parsed mapping info. Unlike {@link #getFieldOpenSearchTypes()} - * it returns a flattened map. + * Get simplified parsed mapping info. Unlike {@link #getFieldOpenSearchTypes()} it returns a + * flattened map. + * * @return A map between field names and matching `ExprCoreType`s. */ @Override public Map getFieldTypes() { if (cachedFieldOpenSearchTypes == null) { - cachedFieldOpenSearchTypes = new OpenSearchDescribeIndexRequest(client, indexName) - .getFieldTypes(); + cachedFieldOpenSearchTypes = + new OpenSearchDescribeIndexRequest(client, indexName).getFieldTypes(); } if (cachedFieldTypes == null) { - cachedFieldTypes = OpenSearchDataType.traverseAndFlatten(cachedFieldOpenSearchTypes) - .entrySet().stream().collect( - LinkedHashMap::new, - (map, item) -> map.put(item.getKey(), item.getValue().getExprType()), - Map::putAll); + cachedFieldTypes = + OpenSearchDataType.traverseAndFlatten(cachedFieldOpenSearchTypes).entrySet().stream() + .collect( + LinkedHashMap::new, + (map, item) -> map.put(item.getKey(), item.getValue().getExprType()), + Map::putAll); } return cachedFieldTypes; } @@ -140,19 +131,18 @@ public Map getReservedFieldTypes() { /** * Get parsed mapping info. + * * @return A complete map between field names and their types. 
*/ public Map getFieldOpenSearchTypes() { if (cachedFieldOpenSearchTypes == null) { - cachedFieldOpenSearchTypes = new OpenSearchDescribeIndexRequest(client, indexName) - .getFieldTypes(); + cachedFieldOpenSearchTypes = + new OpenSearchDescribeIndexRequest(client, indexName).getFieldTypes(); } return cachedFieldOpenSearchTypes; } - /** - * Get the max result window setting of the table. - */ + /** Get the max result window setting of the table. */ public Integer getMaxResultWindow() { if (cachedMaxResultWindow == null) { cachedMaxResultWindow = @@ -161,9 +151,7 @@ public Integer getMaxResultWindow() { return cachedMaxResultWindow; } - /** - * TODO: Push down operations to index scan operator as much as possible in future. - */ + /** TODO: Push down operations to index scan operator as much as possible in future. */ @Override public PhysicalPlan implement(LogicalPlan plan) { // TODO: Leave it here to avoid impact Prometheus and AD operators. Need to move to Planner. @@ -175,12 +163,13 @@ public TableScanBuilder createScanBuilder() { final int querySizeLimit = settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT); final TimeValue cursorKeepAlive = settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); - var builder = new OpenSearchRequestBuilder( - querySizeLimit, - createExprValueFactory()); + var builder = new OpenSearchRequestBuilder(querySizeLimit, createExprValueFactory()); Function createScanOperator = - requestBuilder -> new OpenSearchIndexScan(client, requestBuilder.getMaxResponseSize(), - requestBuilder.build(indexName, getMaxResultWindow(), cursorKeepAlive)); + requestBuilder -> + new OpenSearchIndexScan( + client, + requestBuilder.getMaxResponseSize(), + requestBuilder.build(indexName, getMaxResultWindow(), cursorKeepAlive)); return new OpenSearchIndexScanBuilder(builder, createScanOperator); } @@ -193,27 +182,27 @@ private OpenSearchExprValueFactory createExprValueFactory() { @VisibleForTesting @RequiredArgsConstructor - public static class 
OpenSearchDefaultImplementor - extends DefaultImplementor { + public static class OpenSearchDefaultImplementor extends DefaultImplementor { private final OpenSearchClient client; @Override public PhysicalPlan visitMLCommons(LogicalMLCommons node, OpenSearchIndexScan context) { - return new MLCommonsOperator(visitChild(node, context), node.getAlgorithm(), - node.getArguments(), client.getNodeClient()); + return new MLCommonsOperator( + visitChild(node, context), + node.getAlgorithm(), + node.getArguments(), + client.getNodeClient()); } @Override public PhysicalPlan visitAD(LogicalAD node, OpenSearchIndexScan context) { - return new ADOperator(visitChild(node, context), - node.getArguments(), client.getNodeClient()); + return new ADOperator(visitChild(node, context), node.getArguments(), client.getNodeClient()); } @Override public PhysicalPlan visitML(LogicalML node, OpenSearchIndexScan context) { - return new MLOperator(visitChild(node, context), - node.getArguments(), client.getNodeClient()); + return new MLOperator(visitChild(node, context), node.getArguments(), client.getNodeClient()); } } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java index c915fa549b..7c022e2190 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import static org.opensearch.sql.utils.SystemIndexUtils.isSystemIndex; @@ -22,10 +21,9 @@ public class OpenSearchStorageEngine implements StorageEngine { /** OpenSearch client connection. 
*/ - @Getter - private final OpenSearchClient client; - @Getter - private final Settings settings; + @Getter private final OpenSearchClient client; + + @Getter private final Settings settings; @Override public Table getTable(DataSourceSchemaName dataSourceSchemaName, String name) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java index 0ca9cde3d2..b2e9319bb1 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import java.io.IOException; @@ -26,9 +25,7 @@ import org.opensearch.sql.planner.SerializablePlan; import org.opensearch.sql.storage.TableScanOperator; -/** - * OpenSearch index scan operator. - */ +/** OpenSearch index scan operator. */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class OpenSearchIndexScan extends TableScanOperator implements SerializablePlan { @@ -37,14 +34,10 @@ public class OpenSearchIndexScan extends TableScanOperator implements Serializab private OpenSearchClient client; /** Search request. */ - @EqualsAndHashCode.Include - @ToString.Include - private OpenSearchRequest request; + @EqualsAndHashCode.Include @ToString.Include private OpenSearchRequest request; /** Largest number of rows allowed in the response. */ - @EqualsAndHashCode.Include - @ToString.Include - private int maxResponseSize; + @EqualsAndHashCode.Include @ToString.Include private int maxResponseSize; /** Number of rows returned. */ private Integer queryCount; @@ -52,12 +45,9 @@ public class OpenSearchIndexScan extends TableScanOperator implements Serializab /** Search response for current batch. 
*/ private Iterator iterator; - /** - * Creates index scan based on a provided OpenSearchRequestBuilder. - */ - public OpenSearchIndexScan(OpenSearchClient client, - int maxResponseSize, - OpenSearchRequest request) { + /** Creates index scan based on a provided OpenSearchRequestBuilder. */ + public OpenSearchIndexScan( + OpenSearchClient client, int maxResponseSize, OpenSearchRequest request) { this.client = client; this.maxResponseSize = maxResponseSize; this.request = request; @@ -106,12 +96,13 @@ public String explain() { return request.toString(); } - /** No-args constructor. + /** + * No-args constructor. + * * @deprecated Exists only to satisfy Java serialization API. */ @Deprecated(since = "introduction") - public OpenSearchIndexScan() { - } + public OpenSearchIndexScan() {} @Override public void readExternal(ObjectInput in) throws IOException { @@ -119,8 +110,9 @@ public void readExternal(ObjectInput in) throws IOException { byte[] requestStream = new byte[reqSize]; in.read(requestStream); - var engine = (OpenSearchStorageEngine) ((PlanSerializer.CursorDeserializationStream) in) - .resolveObject("engine"); + var engine = + (OpenSearchStorageEngine) + ((PlanSerializer.CursorDeserializationStream) in).resolveObject("engine"); try (BytesStreamInput bsi = new BytesStreamInput(requestStream)) { request = new OpenSearchScrollRequest(bsi, engine); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java index d5f89d2579..02ac21a39d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java @@ -24,9 +24,7 @@ import org.opensearch.sql.planner.logical.LogicalFilter; import 
org.opensearch.sql.planner.logical.LogicalSort; -/** - * Index scan builder for aggregate query used by {@link OpenSearchIndexScanBuilder} internally. - */ +/** Index scan builder for aggregate query used by {@link OpenSearchIndexScanBuilder} internally. */ @EqualsAndHashCode class OpenSearchIndexScanAggregationBuilder implements PushDownQueryBuilder { @@ -42,9 +40,8 @@ class OpenSearchIndexScanAggregationBuilder implements PushDownQueryBuilder { /** Sorting items pushed down. */ private List> sortList; - - OpenSearchIndexScanAggregationBuilder(OpenSearchRequestBuilder requestBuilder, - LogicalAggregation aggregation) { + OpenSearchIndexScanAggregationBuilder( + OpenSearchRequestBuilder requestBuilder, LogicalAggregation aggregation) { this.requestBuilder = requestBuilder; aggregatorList = aggregation.getAggregatorList(); groupByList = aggregation.getGroupByList(); @@ -57,8 +54,7 @@ public OpenSearchRequestBuilder build() { Pair, OpenSearchAggregationResponseParser> aggregationBuilder = builder.buildAggregationBuilder(aggregatorList, groupByList, sortList); requestBuilder.pushDownAggregation(aggregationBuilder); - requestBuilder.pushTypeMapping( - builder.buildTypeMapping(aggregatorList, groupByList)); + requestBuilder.pushTypeMapping(builder.buildTypeMapping(aggregatorList, groupByList)); return requestBuilder; } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java index edcbedc7a7..8a2f3e98f4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java @@ -30,29 +30,24 @@ public class OpenSearchIndexScanBuilder extends TableScanBuilder { private final Function scanFactory; - /** - * Delegated index scan builder for non-aggregate or aggregate query. 
- */ - @EqualsAndHashCode.Include - private PushDownQueryBuilder delegate; + + /** Delegated index scan builder for non-aggregate or aggregate query. */ + @EqualsAndHashCode.Include private PushDownQueryBuilder delegate; /** Is limit operator pushed down. */ private boolean isLimitPushedDown = false; - /** - * Constructor used during query execution. - */ - public OpenSearchIndexScanBuilder(OpenSearchRequestBuilder requestBuilder, + /** Constructor used during query execution. */ + public OpenSearchIndexScanBuilder( + OpenSearchRequestBuilder requestBuilder, Function scanFactory) { this.delegate = new OpenSearchIndexScanQueryBuilder(requestBuilder); this.scanFactory = scanFactory; - } - /** - * Constructor used for unit tests. - */ - protected OpenSearchIndexScanBuilder(PushDownQueryBuilder translator, + /** Constructor used for unit tests. */ + protected OpenSearchIndexScanBuilder( + PushDownQueryBuilder translator, Function scanFactory) { this.delegate = translator; this.scanFactory = scanFactory; @@ -117,13 +112,16 @@ public boolean pushDownNested(LogicalNested nested) { /** * Valid if sorting is only by fields. 
+ * * @param sort Logical sort * @return True if sorting by fields only */ private boolean sortByFieldsOnly(LogicalSort sort) { return sort.getSortList().stream() - .map(sortItem -> sortItem.getRight() instanceof ReferenceExpression - || isNestedFunction(sortItem.getRight())) + .map( + sortItem -> + sortItem.getRight() instanceof ReferenceExpression + || isNestedFunction(sortItem.getRight())) .reduce(true, Boolean::logicalAnd); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java index 590272a9f1..f4b0b05256 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java @@ -35,8 +35,8 @@ import org.opensearch.sql.planner.logical.LogicalSort; /** - * Index scan builder for simple non-aggregate query used by - * {@link OpenSearchIndexScanBuilder} internally. + * Index scan builder for simple non-aggregate query used by {@link OpenSearchIndexScanBuilder} + * internally. 
*/ @VisibleForTesting @EqualsAndHashCode @@ -50,13 +50,11 @@ public OpenSearchIndexScanQueryBuilder(OpenSearchRequestBuilder requestBuilder) @Override public boolean pushDownFilter(LogicalFilter filter) { - FilterQueryBuilder queryBuilder = new FilterQueryBuilder( - new DefaultExpressionSerializer()); + FilterQueryBuilder queryBuilder = new FilterQueryBuilder(new DefaultExpressionSerializer()); Expression queryCondition = filter.getCondition(); QueryBuilder query = queryBuilder.build(queryCondition); requestBuilder.pushDownFilter(query); - requestBuilder.pushDownTrackedScore( - trackScoresFromOpenSearchFunction(queryCondition)); + requestBuilder.pushDownTrackedScore(trackScoresFromOpenSearchFunction(queryCondition)); return true; } @@ -64,9 +62,10 @@ public boolean pushDownFilter(LogicalFilter filter) { public boolean pushDownSort(LogicalSort sort) { List> sortList = sort.getSortList(); final SortQueryBuilder builder = new SortQueryBuilder(); - requestBuilder.pushDownSort(sortList.stream() - .map(sortItem -> builder.build(sortItem.getValue(), sortItem.getKey())) - .collect(Collectors.toList())); + requestBuilder.pushDownSort( + sortList.stream() + .map(sortItem -> builder.build(sortItem.getValue(), sortItem.getKey())) + .collect(Collectors.toList())); return true; } @@ -78,8 +77,7 @@ public boolean pushDownLimit(LogicalLimit limit) { @Override public boolean pushDownProject(LogicalProject project) { - requestBuilder.pushDownProjects( - findReferenceExpressions(project.getProjectList())); + requestBuilder.pushDownProjects(findReferenceExpressions(project.getProjectList())); // Return false intentionally to keep the original project operator return false; @@ -105,8 +103,8 @@ private boolean trackScoresFromOpenSearchFunction(Expression condition) { return true; } if (condition instanceof FunctionExpression) { - return ((FunctionExpression) condition).getArguments().stream() - .anyMatch(this::trackScoresFromOpenSearchFunction); + return ((FunctionExpression) condition) 
+ .getArguments().stream().anyMatch(this::trackScoresFromOpenSearchFunction); } return false; } @@ -114,8 +112,7 @@ private boolean trackScoresFromOpenSearchFunction(Expression condition) { @Override public boolean pushDownNested(LogicalNested nested) { requestBuilder.pushDownNested(nested.getFields()); - requestBuilder.pushDownProjects( - findReferenceExpressions(nested.getProjectList())); + requestBuilder.pushDownProjects(findReferenceExpressions(nested.getProjectList())); // Return false intentionally to keep the original nested operator // Since we return false we need to pushDownProject here as it won't be // pushed down due to no matching push down rule. @@ -130,8 +127,8 @@ public OpenSearchRequestBuilder build() { /** * Find reference expression from expression. - * @param expressions a list of expression. * + * @param expressions a list of expression. * @return a set of ReferenceExpression */ public static Set findReferenceExpressions( @@ -145,18 +142,20 @@ public static Set findReferenceExpressions( /** * Find reference expression from expression. - * @param expression expression. * + * @param expression expression. 
* @return a list of ReferenceExpression */ public static List findReferenceExpression(NamedExpression expression) { List results = new ArrayList<>(); - expression.accept(new ExpressionNodeVisitor<>() { - @Override - public Object visitReference(ReferenceExpression node, Object context) { - return results.add(node); - } - }, null); + expression.accept( + new ExpressionNodeVisitor<>() { + @Override + public Object visitReference(ReferenceExpression node, Object context) { + return results.add(node); + } + }, + null); return results; } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java index 274bc4647d..b855b9a8b5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java @@ -14,9 +14,7 @@ import org.opensearch.sql.planner.logical.LogicalProject; import org.opensearch.sql.planner.logical.LogicalSort; -/** - * Translates a logical query plan into OpenSearch DSL and an appropriate request. - */ +/** Translates a logical query plan into OpenSearch DSL and an appropriate request. 
*/ public interface PushDownQueryBuilder { default boolean pushDownFilter(LogicalFilter filter) { return false; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java index 7b68bd5c92..a485296b52 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script; import lombok.experimental.UtilityClass; @@ -12,6 +11,7 @@ public class StringUtils { /** * Converts sql wildcard character % and _ to * and ?. + * * @param text string to be converted * @return converted string */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java index a1b633f942..753c2bbbc7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java @@ -40,10 +40,12 @@ public abstract class LuceneQuery { /** * Check if function expression supported by current Lucene query. Default behavior is that report * supported if: + * *
    - *
  1. Left is a reference
  2. - *
  3. Right side is a literal
  4. + *
  5. Left is a reference + *
  6. Right side is a literal *
+ * * @param func function * @return return true if supported, otherwise false. */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java index 7e13cad592..2e33e3cc7c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import lombok.RequiredArgsConstructor; @@ -14,19 +13,19 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Lucene query that builds range query for non-quality comparison. - */ +/** Lucene query that builds range query for non-quality comparison. */ @RequiredArgsConstructor public class RangeQuery extends LuceneQuery { public enum Comparison { - LT, GT, LTE, GTE, BETWEEN + LT, + GT, + LTE, + GTE, + BETWEEN } - /** - * Comparison that range query build for. - */ + /** Comparison that range query build for. 
*/ private final Comparison comparison; @Override @@ -55,5 +54,4 @@ private Object value(ExprValue literal) { return literal.value(); } } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java index c98de1cd84..cd506898d7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import org.opensearch.index.query.QueryBuilder; @@ -13,9 +12,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Lucene query that build term query for equality comparison. - */ +/** Lucene query that build term query for equality comparison. */ public class TermQuery extends LuceneQuery { @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java index 35d5a43a41..0346b7712e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java @@ -8,16 +8,14 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.QueryStringQueryBuilder; -/** - * Class for Lucene query that builds the 'query' query. - */ +/** Class for Lucene query that builds the 'query' query. 
*/ public class QueryQuery extends NoFieldQuery { private final String queryQueryName = "query"; /** - * Default constructor for QueryQuery configures how RelevanceQuery.build() handles - * named arguments by calling the constructor of QueryStringQuery. + * Default constructor for QueryQuery configures how RelevanceQuery.build() handles named + * arguments by calling the constructor of QueryStringQuery. */ public QueryQuery() { super(FunctionParameterRepository.QueryStringQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java index 43131baa3e..410c55cea6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java @@ -9,13 +9,11 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.QueryStringQueryBuilder; -/** - * Class for Lucene query that builds the query_string query. - */ +/** Class for Lucene query that builds the query_string query. */ public class QueryStringQuery extends MultiFieldQuery { /** - * Default constructor for QueryString configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for QueryString configures how RelevanceQuery.build() handles named + * arguments. 
*/ public QueryStringQuery() { super(FunctionParameterRepository.QueryStringQueryBuildActions); @@ -29,8 +27,8 @@ public QueryStringQuery() { * @return : Builder for query_string query */ @Override - protected QueryStringQueryBuilder createBuilder(ImmutableMap fields, - String query) { + protected QueryStringQueryBuilder createBuilder( + ImmutableMap fields, String query) { return QueryBuilders.queryStringQuery(query).fields(fields); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java index b8641a5c0b..87faf320ec 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java @@ -20,37 +20,39 @@ import org.opensearch.sql.expression.NamedArgumentExpression; import org.opensearch.sql.opensearch.storage.script.filter.lucene.LuceneQuery; -/** - * Base class for query abstraction that builds a relevance query from function expression. - */ +/** Base class for query abstraction that builds a relevance query from function expression. 
*/ @RequiredArgsConstructor public abstract class RelevanceQuery extends LuceneQuery { - @Getter - private final Map> queryBuildActions; + @Getter private final Map> queryBuildActions; protected void ignoreArguments(List arguments) { - arguments.removeIf(a -> a.getArgName().equalsIgnoreCase("field") - || a.getArgName().equalsIgnoreCase("fields") - || a.getArgName().equalsIgnoreCase("query")); + arguments.removeIf( + a -> + a.getArgName().equalsIgnoreCase("field") + || a.getArgName().equalsIgnoreCase("fields") + || a.getArgName().equalsIgnoreCase("query")); } protected void checkValidArguments(String argNormalized, T queryBuilder) { if (!queryBuildActions.containsKey(argNormalized)) { throw new SemanticCheckException( - String.format("Parameter %s is invalid for %s function.", - argNormalized, queryBuilder.getWriteableName())); + String.format( + "Parameter %s is invalid for %s function.", + argNormalized, queryBuilder.getWriteableName())); } } protected T loadArguments(List arguments) throws SemanticCheckException { // Aggregate parameters by name, so getting a Map - arguments.stream().collect(Collectors.groupingBy(a -> a.getArgName().toLowerCase())) - .forEach((k, v) -> { - if (v.size() > 1) { - throw new SemanticCheckException( - String.format("Parameter '%s' can only be specified once.", k)); - } - }); + arguments.stream() + .collect(Collectors.groupingBy(a -> a.getArgName().toLowerCase())) + .forEach( + (k, v) -> { + if (v.size() > 1) { + throw new SemanticCheckException( + String.format("Parameter '%s' can only be specified once.", k)); + } + }); T queryBuilder = createQueryBuilder(arguments); @@ -63,9 +65,7 @@ protected T loadArguments(List arguments) throws Semant checkValidArguments(argNormalized, queryBuilder); - (Objects.requireNonNull( - queryBuildActions - .get(argNormalized))) + (Objects.requireNonNull(queryBuildActions.get(argNormalized))) .apply(queryBuilder, arg.getValue().valueOf()); } @@ -74,15 +74,16 @@ protected T loadArguments(List arguments) 
throws Semant @Override public QueryBuilder build(FunctionExpression func) { - var arguments = func.getArguments().stream() - .map(a -> (NamedArgumentExpression)a).collect(Collectors.toList()); + var arguments = + func.getArguments().stream() + .map(a -> (NamedArgumentExpression) a) + .collect(Collectors.toList()); if (arguments.size() < 2) { throw new SyntaxCheckException( String.format("%s requires at least two parameters", getQueryName())); } return loadArguments(arguments); - } protected abstract T createQueryBuilder(List arguments); @@ -90,12 +91,10 @@ public QueryBuilder build(FunctionExpression func) { protected abstract String getQueryName(); /** - * Convenience interface for a function that updates a QueryBuilder - * based on ExprValue. + * Convenience interface for a function that updates a QueryBuilder based on ExprValue. * * @param Concrete query builder */ - protected interface QueryBuilderStep extends - BiFunction { - } + protected interface QueryBuilderStep + extends BiFunction {} } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java index 157921572a..86dd44c118 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java @@ -11,16 +11,16 @@ public class SimpleQueryStringQuery extends MultiFieldQuery { /** - * Default constructor for SimpleQueryString configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for SimpleQueryString configures how RelevanceQuery.build() handles named + * arguments. 
*/ public SimpleQueryStringQuery() { super(FunctionParameterRepository.SimpleQueryStringQueryBuildActions); } @Override - protected SimpleQueryStringBuilder createBuilder(ImmutableMap fields, - String query) { + protected SimpleQueryStringBuilder createBuilder( + ImmutableMap fields, String query) { return QueryBuilders.simpleQueryStringQuery(query).fields(fields); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java index ec110dfd8b..086aaddc5e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java @@ -26,18 +26,20 @@ public SingleFieldQuery(Map> queryBuildActions) { @Override protected T createQueryBuilder(List arguments) { // Extract 'field' and 'query' - var field = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("field")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'field' parameter is missing.")); + var field = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("field")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'field' parameter is missing.")); - var query = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("query")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); + var query = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("query")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); return createBuilder( - ((ReferenceExpression)field.getValue()).getAttr(), + ((ReferenceExpression) field.getValue()).getAttr(), query.getValue().valueOf().stringValue()); } diff --git 
a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java index 9fd37e3de7..7b9887e516 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java @@ -3,20 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene.relevance; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.WildcardQueryBuilder; import org.opensearch.sql.opensearch.storage.script.StringUtils; -/** - * Lucene query that builds wildcard query. - */ +/** Lucene query that builds wildcard query. */ public class WildcardQuery extends SingleFieldQuery { /** - * Default constructor for WildcardQuery configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for WildcardQuery configures how RelevanceQuery.build() handles named + * arguments. 
*/ public WildcardQuery() { super(FunctionParameterRepository.WildcardQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java index 9002df7c8f..7669b569d4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.sort; import static org.opensearch.sql.analysis.NestedAnalyzer.generatePath; @@ -22,23 +21,17 @@ import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Builder of {@link SortBuilder}. - */ +/** Builder of {@link SortBuilder}. */ public class SortQueryBuilder { - /** - * The mapping between Core Engine sort order and OpenSearch sort order. - */ + /** The mapping between Core Engine sort order and OpenSearch sort order. */ private Map sortOrderMap = new ImmutableMap.Builder() .put(Sort.SortOrder.ASC, SortOrder.ASC) .put(Sort.SortOrder.DESC, SortOrder.DESC) .build(); - /** - * The mapping between Core Engine null order and OpenSearch null order. - */ + /** The mapping between Core Engine null order and OpenSearch null order. */ private Map missingMap = new ImmutableMap.Builder() .put(Sort.NullOrder.NULL_FIRST, "_first") @@ -61,14 +54,15 @@ public SortBuilder build(Expression expression, Sort.SortOption option) { } else if (isNestedFunction(expression)) { validateNestedArgs((FunctionExpression) expression); - String orderByName = ((FunctionExpression)expression).getArguments().get(0).toString(); + String orderByName = ((FunctionExpression) expression).getArguments().get(0).toString(); // Generate path if argument not supplied in function. 
- ReferenceExpression path = ((FunctionExpression)expression).getArguments().size() == 2 - ? (ReferenceExpression) ((FunctionExpression)expression).getArguments().get(1) - : generatePath(orderByName); + ReferenceExpression path = + ((FunctionExpression) expression).getArguments().size() == 2 + ? (ReferenceExpression) ((FunctionExpression) expression).getArguments().get(1) + : generatePath(orderByName); return SortBuilders.fieldSort(orderByName) - .order(sortOrderMap.get(option.getSortOrder())) - .setNestedSort(new NestedSortBuilder(path.toString())); + .order(sortOrderMap.get(option.getSortOrder())) + .setNestedSort(new NestedSortBuilder(path.toString())); } else { throw new IllegalStateException("unsupported expression " + expression.getClass()); } @@ -76,29 +70,26 @@ public SortBuilder build(Expression expression, Sort.SortOption option) { /** * Validate semantics for arguments in nested function. + * * @param nestedFunc Nested function expression. */ private void validateNestedArgs(FunctionExpression nestedFunc) { if (nestedFunc.getArguments().size() < 1 || nestedFunc.getArguments().size() > 2) { throw new IllegalArgumentException( - "nested function supports 2 parameters (field, path) or 1 parameter (field)" - ); + "nested function supports 2 parameters (field, path) or 1 parameter (field)"); } for (Expression arg : nestedFunc.getArguments()) { if (!(arg instanceof ReferenceExpression)) { throw new IllegalArgumentException( - String.format("Illegal nested field name: %s", - arg.toString() - ) - ); + String.format("Illegal nested field name: %s", arg.toString())); } } } private FieldSortBuilder fieldBuild(ReferenceExpression ref, Sort.SortOption option) { return SortBuilders.fieldSort( - OpenSearchTextType.convertTextToKeyword(ref.getAttr(), ref.type())) + OpenSearchTextType.convertTextToKeyword(ref.getAttr(), ref.type())) .order(sortOrderMap.get(option.getSortOrder())) .missing(missingMap.get(option.getNullOrder())); } diff --git 
a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java index 7b6efeeba4..b1b2081f94 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.opensearch.sql.utils.SystemIndexUtils.systemTable; @@ -24,17 +23,12 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.utils.SystemIndexUtils; -/** - * OpenSearch System Index Table Implementation. - */ +/** OpenSearch System Index Table Implementation. */ public class OpenSearchSystemIndex implements Table { - /** - * System Index Name. - */ + /** System Index Name. */ private final Pair systemIndexBundle; - public OpenSearchSystemIndex( - OpenSearchClient client, String indexName) { + public OpenSearchSystemIndex(OpenSearchClient client, String indexName) { this.systemIndexBundle = buildIndexBundle(client, indexName); } @@ -61,8 +55,7 @@ public PhysicalPlan implement(LogicalPlan plan) { @VisibleForTesting @RequiredArgsConstructor - public class OpenSearchSystemIndexDefaultImplementor - extends DefaultImplementor { + public class OpenSearchSystemIndexDefaultImplementor extends DefaultImplementor { @Override public PhysicalPlan visitRelation(LogicalRelation node, Object context) { @@ -79,10 +72,11 @@ private Pair buildIndexBun OpenSearchClient client, String indexName) { SystemIndexUtils.SystemTable systemTable = systemTable(indexName); if (systemTable.isSystemInfoTable()) { - return Pair.of(OpenSearchSystemIndexSchema.SYS_TABLE_TABLES, - new OpenSearchCatIndicesRequest(client)); + return Pair.of( + OpenSearchSystemIndexSchema.SYS_TABLE_TABLES, new OpenSearchCatIndicesRequest(client)); 
} else { - return Pair.of(OpenSearchSystemIndexSchema.SYS_TABLE_MAPPINGS, + return Pair.of( + OpenSearchSystemIndexSchema.SYS_TABLE_MAPPINGS, new OpenSearchDescribeIndexRequest(client, systemTable.getTableName())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java index ee377263c1..57cdd52985 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import java.util.Iterator; @@ -14,21 +13,15 @@ import org.opensearch.sql.opensearch.request.system.OpenSearchSystemRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * OpenSearch index scan operator. - */ +/** OpenSearch index scan operator. */ @RequiredArgsConstructor @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class OpenSearchSystemIndexScan extends TableScanOperator { - /** - * OpenSearch request. - */ + /** OpenSearch request. */ private final OpenSearchSystemRequest request; - /** - * Search response for current batch. - */ + /** Search response for current batch. 
*/ private Iterator iterator; @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java index aa09ff4660..781431ea67 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -15,53 +14,52 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.type.ExprType; -/** - * Definition of the system table schema. - */ +/** Definition of the system table schema. */ @Getter @RequiredArgsConstructor public enum OpenSearchSystemIndexSchema { - - SYS_TABLE_TABLES(new LinkedHashMap() {{ - put("TABLE_CAT", STRING); - put("TABLE_SCHEM", STRING); - put("TABLE_NAME", STRING); - put("TABLE_TYPE", STRING); - put("REMARKS", STRING); - put("TYPE_CAT", STRING); - put("TYPE_SCHEM", STRING); - put("TYPE_NAME", STRING); - put("SELF_REFERENCING_COL_NAME", STRING); - put("REF_GENERATION", STRING); - } - } - ), - SYS_TABLE_MAPPINGS(new ImmutableMap.Builder() - .put("TABLE_CAT", STRING) - .put("TABLE_SCHEM", STRING) - .put("TABLE_NAME", STRING) - .put("COLUMN_NAME", STRING) - .put("DATA_TYPE", STRING) - .put("TYPE_NAME", STRING) - .put("COLUMN_SIZE", STRING) - .put("BUFFER_LENGTH", STRING) - .put("DECIMAL_DIGITS", STRING) - .put("NUM_PREC_RADIX", STRING) - .put("NULLABLE", STRING) - .put("REMARKS", STRING) - .put("COLUMN_DEF", STRING) - .put("SQL_DATA_TYPE", STRING) - .put("SQL_DATETIME_SUB", STRING) - .put("CHAR_OCTET_LENGTH", STRING) - .put("ORDINAL_POSITION", STRING) - .put("IS_NULLABLE", STRING) - .put("SCOPE_CATALOG", STRING) - .put("SCOPE_SCHEMA", STRING) - 
.put("SCOPE_TABLE", STRING) - .put("SOURCE_DATA_TYPE", STRING) - .put("IS_AUTOINCREMENT", STRING) - .put("IS_GENERATEDCOLUMN", STRING) - .build()); + SYS_TABLE_TABLES( + new LinkedHashMap() { + { + put("TABLE_CAT", STRING); + put("TABLE_SCHEM", STRING); + put("TABLE_NAME", STRING); + put("TABLE_TYPE", STRING); + put("REMARKS", STRING); + put("TYPE_CAT", STRING); + put("TYPE_SCHEM", STRING); + put("TYPE_NAME", STRING); + put("SELF_REFERENCING_COL_NAME", STRING); + put("REF_GENERATION", STRING); + } + }), + SYS_TABLE_MAPPINGS( + new ImmutableMap.Builder() + .put("TABLE_CAT", STRING) + .put("TABLE_SCHEM", STRING) + .put("TABLE_NAME", STRING) + .put("COLUMN_NAME", STRING) + .put("DATA_TYPE", STRING) + .put("TYPE_NAME", STRING) + .put("COLUMN_SIZE", STRING) + .put("BUFFER_LENGTH", STRING) + .put("DECIMAL_DIGITS", STRING) + .put("NUM_PREC_RADIX", STRING) + .put("NULLABLE", STRING) + .put("REMARKS", STRING) + .put("COLUMN_DEF", STRING) + .put("SQL_DATA_TYPE", STRING) + .put("SQL_DATETIME_SUB", STRING) + .put("CHAR_OCTET_LENGTH", STRING) + .put("ORDINAL_POSITION", STRING) + .put("IS_NULLABLE", STRING) + .put("SCOPE_CATALOG", STRING) + .put("SCOPE_SCHEMA", STRING) + .put("SCOPE_TABLE", STRING) + .put("SOURCE_DATA_TYPE", STRING) + .put("IS_AUTOINCREMENT", STRING) + .put("IS_GENERATEDCOLUMN", STRING) + .build()); private final Map mapping; } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java index d985bcbeec..040b7d2759 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import static org.junit.jupiter.api.Assertions.assertAll; @@ -89,17 +88,14 @@ class OpenSearchNodeClientTest { 
@Mock(answer = RETURNS_DEEP_STUBS) private NodeClient nodeClient; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private GetIndexResponse indexResponse; + @Mock private GetIndexResponse indexResponse; - private final ExprTupleValue exprTupleValue = ExprTupleValue.fromExprValueMap( - Map.of("id", new ExprIntegerValue(1))); + private final ExprTupleValue exprTupleValue = + ExprTupleValue.fromExprValueMap(Map.of("id", new ExprIntegerValue(1))); private OpenSearchClient client; @@ -110,8 +106,7 @@ void setUp() { @Test void is_index_exist() { - when(nodeClient.admin().indices() - .exists(any(IndicesExistsRequest.class)).actionGet()) + when(nodeClient.admin().indices().exists(any(IndicesExistsRequest.class)).actionGet()) .thenReturn(new IndicesExistsResponse(true)); assertTrue(client.exists("test")); @@ -120,8 +115,7 @@ void is_index_exist() { @Test void is_index_not_exist() { String indexName = "test"; - when(nodeClient.admin().indices() - .exists(any(IndicesExistsRequest.class)).actionGet()) + when(nodeClient.admin().indices().exists(any(IndicesExistsRequest.class)).actionGet()) .thenReturn(new IndicesExistsResponse(false)); assertFalse(client.exists(indexName)); @@ -137,11 +131,8 @@ void is_index_exist_with_exception() { @Test void create_index() { String indexName = "test"; - Map mappings = ImmutableMap.of( - "properties", - ImmutableMap.of("name", "text")); - when(nodeClient.admin().indices() - .create(any(CreateIndexRequest.class)).actionGet()) + Map mappings = ImmutableMap.of("properties", ImmutableMap.of("name", "text")); + when(nodeClient.admin().indices().create(any(CreateIndexRequest.class)).actionGet()) .thenReturn(new CreateIndexResponse(true, true, indexName)); client.createIndex(indexName, mappings); @@ -151,8 +142,7 @@ void create_index() { void create_index_with_exception() { 
when(nodeClient.admin().indices().create(any())).thenThrow(RuntimeException.class); - assertThrows(IllegalStateException.class, - () -> client.createIndex("test", ImmutableMap.of())); + assertThrows(IllegalStateException.class, () -> client.createIndex("test", ImmutableMap.of())); } @Test @@ -172,58 +162,57 @@ void get_index_mappings() throws IOException { () -> assertEquals(10, mapping.size()), () -> assertEquals(17, parsedTypes.size()), () -> assertEquals("TEXT", mapping.get("address").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("address")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("address")), () -> assertEquals("INTEGER", mapping.get("age").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), - parsedTypes.get("age")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), parsedTypes.get("age")), () -> assertEquals("DOUBLE", mapping.get("balance").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Double), - parsedTypes.get("balance")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Double), parsedTypes.get("balance")), () -> assertEquals("KEYWORD", mapping.get("city").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("city")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("city")), () -> assertEquals("DATE", mapping.get("birthday").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("birthday")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Date), parsedTypes.get("birthday")), () -> assertEquals("GEO_POINT", mapping.get("location").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), - parsedTypes.get("location")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), parsedTypes.get("location")), // unknown 
type isn't parsed and ignored () -> assertFalse(mapping.containsKey("new_field")), () -> assertNull(parsedTypes.get("new_field")), () -> assertEquals("TEXT", mapping.get("field with spaces").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("field with spaces")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("field with spaces")), () -> assertEquals("TEXT", mapping.get("employer").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("employer")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("employer")), // `employer` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("employer")).getFields().size() > 0), + () -> assertTrue(((OpenSearchTextType) parsedTypes.get("employer")).getFields().size() > 0), () -> assertEquals("NESTED", mapping.get("projects").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Boolean), - parsedTypes.get("projects.active")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("projects.release")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects.members")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("projects.members.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Boolean), parsedTypes.get("projects.active")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Date), parsedTypes.get("projects.release")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects.members")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), 
parsedTypes.get("projects.members.name")), () -> assertEquals("OBJECT", mapping.get("manager").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Object), - parsedTypes.get("manager")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("manager.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Object), parsedTypes.get("manager")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("manager.name")), // `manager.name` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("manager.name")) - .getFields().size() > 0), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("manager.address")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Long), - parsedTypes.get("manager.salary")) - ); + () -> + assertTrue( + ((OpenSearchTextType) parsedTypes.get("manager.name")).getFields().size() > 0), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("manager.address")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Long), parsedTypes.get("manager.salary"))); } @Test @@ -247,11 +236,8 @@ void get_index_mappings_with_IOException() { @Test void get_index_mappings_with_non_exist_index() { - when(nodeClient.admin().indices() - .prepareGetMappings(any()) - .setLocal(anyBoolean()) - .get() - ).thenThrow(IndexNotFoundException.class); + when(nodeClient.admin().indices().prepareGetMappings(any()).setLocal(anyBoolean()).get()) + .thenThrow(IndexNotFoundException.class); assertThrows(IndexNotFoundException.class, () -> client.getIndexMappings("non_exist_index")); } @@ -307,9 +293,7 @@ void search() { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1L, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); 
when(searchHit.getSourceAsString()).thenReturn("{\"id\", 1}"); when(searchHit.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue); @@ -320,9 +304,13 @@ void search() { when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of("id")); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of("id")); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -355,9 +343,13 @@ void cleanup() { when(requestBuilder.addScrollId(any())).thenReturn(requestBuilder); when(requestBuilder.get()).thenReturn(null); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); request.setScrollId("scroll123"); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); @@ -372,9 +364,13 @@ void cleanup() { @Test void cleanup_without_scrollId() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.cleanup(request); verify(nodeClient, never()).prepareClearScroll(); } @@ -384,9 +380,13 @@ void cleanup_without_scrollId() { void cleanup_rethrows_exception() { when(nodeClient.prepareClearScroll()).thenThrow(new RuntimeException()); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); request.setScrollId("scroll123"); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); @@ -398,10 +398,8 @@ void get_indices() { AliasMetadata aliasMetadata = mock(AliasMetadata.class); final var openMap = Map.of("index", List.of(aliasMetadata)); when(aliasMetadata.alias()).thenReturn("index_alias"); - when(nodeClient.admin().indices() - .prepareGetIndex() - .setLocal(true) - .get()).thenReturn(indexResponse); + when(nodeClient.admin().indices().prepareGetIndex().setLocal(true).get()) + .thenReturn(indexResponse); when(indexResponse.getIndices()).thenReturn(new String[] {"index"}); when(indexResponse.aliases()).thenReturn(openMap); @@ -427,10 +425,8 @@ void ml() { public void mockNodeClientIndicesMappings(String indexName, String mappings) { GetMappingsResponse mockResponse = mock(GetMappingsResponse.class); MappingMetadata emptyMapping = mock(MappingMetadata.class); - when(nodeClient.admin().indices() - .prepareGetMappings(any()) - .setLocal(anyBoolean()) - .get()).thenReturn(mockResponse); + when(nodeClient.admin().indices().prepareGetMappings(any()).setLocal(anyBoolean()).get()) + .thenReturn(mockResponse); try { Map metadata; if (mappings.isEmpty()) { @@ -445,13 +441,12 @@ public void mockNodeClientIndicesMappings(String indexName, String mappings) { } } - private void mockNodeClientSettings(String indexName, String indexMetadata) - throws IOException { + private void mockNodeClientSettings(String indexName, String indexMetadata) throws IOException { GetSettingsResponse mockResponse = mock(GetSettingsResponse.class); when(nodeClient.admin().indices().prepareGetSettings(any()).setLocal(anyBoolean()).get()) .thenReturn(mockResponse); - Map metadata = Map.of(indexName, - IndexMetadata.fromXContent(createParser(indexMetadata)).getSettings()); + Map metadata = + Map.of(indexName, IndexMetadata.fromXContent(createParser(indexMetadata)).getSettings()); when(mockResponse.getIndexToSettings()).thenReturn(metadata); } diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java index 409596910e..99201aae4f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import static org.junit.jupiter.api.Assertions.assertAll; @@ -79,22 +78,20 @@ class OpenSearchRestClientTest { private static final String TEST_MAPPING_FILE = "mappings/accounts.json"; + @Mock(answer = RETURNS_DEEP_STUBS) private RestHighLevelClient restClient; private OpenSearchClient client; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private GetIndexResponse getIndexResponse; + @Mock private GetIndexResponse getIndexResponse; - private final ExprTupleValue exprTupleValue = ExprTupleValue.fromExprValueMap( - Map.of("id", new ExprIntegerValue(1))); + private final ExprTupleValue exprTupleValue = + ExprTupleValue.fromExprValueMap(Map.of("id", new ExprIntegerValue(1))); @BeforeEach void setUp() { @@ -103,8 +100,9 @@ void setUp() { @Test void is_index_exist() throws IOException { - when(restClient.indices() - .exists(any(), any())) // use any() because missing equals() in GetIndexRequest + when(restClient + .indices() + .exists(any(), any())) // use any() because missing equals() in GetIndexRequest .thenReturn(true); assertTrue(client.exists("test")); @@ -112,8 +110,9 @@ void is_index_exist() throws IOException { @Test void is_index_not_exist() throws IOException { - when(restClient.indices() - .exists(any(), any())) // use any() because missing equals() in GetIndexRequest + when(restClient + .indices() + 
.exists(any(), any())) // use any() because missing equals() in GetIndexRequest .thenReturn(false); assertFalse(client.exists("test")); @@ -129,11 +128,8 @@ void is_index_exist_with_exception() throws IOException { @Test void create_index() throws IOException { String indexName = "test"; - Map mappings = ImmutableMap.of( - "properties", - ImmutableMap.of("name", "text")); - when(restClient.indices() - .create(any(), any())) + Map mappings = ImmutableMap.of("properties", ImmutableMap.of("name", "text")); + when(restClient.indices().create(any(), any())) .thenReturn(new CreateIndexResponse(true, true, indexName)); client.createIndex(indexName, mappings); @@ -142,8 +138,7 @@ void create_index() throws IOException { @Test void create_index_with_IOException() throws IOException { when(restClient.indices().create(any(), any())).thenThrow(IOException.class); - assertThrows(IllegalStateException.class, - () -> client.createIndex("test", ImmutableMap.of())); + assertThrows(IllegalStateException.class, () -> client.createIndex("test", ImmutableMap.of())); } @Test @@ -167,58 +162,57 @@ void get_index_mappings() throws IOException { () -> assertEquals(10, mapping.size()), () -> assertEquals(17, parsedTypes.size()), () -> assertEquals("TEXT", mapping.get("address").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("address")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("address")), () -> assertEquals("INTEGER", mapping.get("age").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), - parsedTypes.get("age")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), parsedTypes.get("age")), () -> assertEquals("DOUBLE", mapping.get("balance").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Double), - parsedTypes.get("balance")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Double), parsedTypes.get("balance")), () -> 
assertEquals("KEYWORD", mapping.get("city").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("city")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("city")), () -> assertEquals("DATE", mapping.get("birthday").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("birthday")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Date), parsedTypes.get("birthday")), () -> assertEquals("GEO_POINT", mapping.get("location").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), - parsedTypes.get("location")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), parsedTypes.get("location")), // unknown type isn't parsed and ignored () -> assertFalse(mapping.containsKey("new_field")), () -> assertNull(parsedTypes.get("new_field")), () -> assertEquals("TEXT", mapping.get("field with spaces").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("field with spaces")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("field with spaces")), () -> assertEquals("TEXT", mapping.get("employer").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("employer")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("employer")), // `employer` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("employer")).getFields().size() > 0), + () -> assertTrue(((OpenSearchTextType) parsedTypes.get("employer")).getFields().size() > 0), () -> assertEquals("NESTED", mapping.get("projects").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Boolean), - parsedTypes.get("projects.active")), - () -> 
assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("projects.release")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects.members")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("projects.members.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Boolean), parsedTypes.get("projects.active")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Date), parsedTypes.get("projects.release")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects.members")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("projects.members.name")), () -> assertEquals("OBJECT", mapping.get("manager").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Object), - parsedTypes.get("manager")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("manager.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Object), parsedTypes.get("manager")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("manager.name")), // `manager.name` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("manager.name")) - .getFields().size() > 0), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("manager.address")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Long), - parsedTypes.get("manager.salary")) - ); + () -> + assertTrue( + ((OpenSearchTextType) parsedTypes.get("manager.name")).getFields().size() > 0), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("manager.address")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Long), parsedTypes.get("manager.salary"))); } @Test @@ -234,14 +228,11 @@ void 
get_index_max_result_windows_settings() throws IOException { Integer maxResultWindow = 1000; GetSettingsResponse response = mock(GetSettingsResponse.class); - Settings maxResultWindowSettings = Settings.builder() - .put("index.max_result_window", maxResultWindow) - .build(); + Settings maxResultWindowSettings = + Settings.builder().put("index.max_result_window", maxResultWindow).build(); Settings emptySettings = Settings.builder().build(); - Map indexToSettings = - mockSettings(indexName, maxResultWindowSettings); - Map indexToDefaultSettings = - mockSettings(indexName, emptySettings); + Map indexToSettings = mockSettings(indexName, maxResultWindowSettings); + Map indexToDefaultSettings = mockSettings(indexName, emptySettings); when(response.getIndexToSettings()).thenReturn(indexToSettings); when(response.getIndexToDefaultSettings()).thenReturn(indexToDefaultSettings); when(restClient.indices().getSettings(any(GetSettingsRequest.class), any())) @@ -258,14 +249,11 @@ void get_index_max_result_windows_default_settings() throws IOException { Integer maxResultWindow = 10000; GetSettingsResponse response = mock(GetSettingsResponse.class); - Settings maxResultWindowSettings = Settings.builder() - .put("index.max_result_window", maxResultWindow) - .build(); + Settings maxResultWindowSettings = + Settings.builder().put("index.max_result_window", maxResultWindow).build(); Settings emptySettings = Settings.builder().build(); - Map indexToSettings = - mockSettings(indexName, emptySettings); - Map indexToDefaultSettings = - mockSettings(indexName, maxResultWindowSettings); + Map indexToSettings = mockSettings(indexName, emptySettings); + Map indexToDefaultSettings = mockSettings(indexName, maxResultWindowSettings); when(response.getIndexToSettings()).thenReturn(indexToSettings); when(response.getIndexToDefaultSettings()).thenReturn(indexToDefaultSettings); when(restClient.indices().getSettings(any(GetSettingsRequest.class), any())) @@ -292,9 +280,7 @@ void search() throws 
IOException { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1L, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); when(searchHit.getSourceAsString()).thenReturn("{\"id\", 1}"); when(searchHit.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue); @@ -305,9 +291,13 @@ void search() throws IOException { when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of("id")); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of("id")); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -327,9 +317,14 @@ void search_with_IOException() throws IOException { when(restClient.search(any(), any())).thenThrow(new IOException()); assertThrows( IllegalStateException.class, - () -> client.search(new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()))); + () -> + client.search( + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()))); } @Test @@ -349,28 +344,34 @@ void scroll_with_IOException() throws IOException { when(restClient.scroll(any(), any())).thenThrow(new IOException()); // First request run successfully - OpenSearchScrollRequest scrollRequest = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, 
List.of()); + OpenSearchScrollRequest scrollRequest = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.search(scrollRequest); - assertThrows( - IllegalStateException.class, () -> client.search(scrollRequest)); + assertThrows(IllegalStateException.class, () -> client.search(scrollRequest)); } @Test void schedule() { AtomicBoolean isRun = new AtomicBoolean(false); - client.schedule( - () -> isRun.set(true)); + client.schedule(() -> isRun.set(true)); assertTrue(isRun.get()); } @Test @SneakyThrows void cleanup() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); @@ -381,9 +382,13 @@ void cleanup() { @Test void cleanup_without_scrollId() throws IOException { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.cleanup(request); verify(restClient, never()).clearScroll(any(), any()); } @@ -393,9 +398,13 @@ void cleanup_without_scrollId() throws IOException { void cleanup_with_IOException() { when(restClient.clearScroll(any(), any())).thenThrow(new IOException()); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java index 4edb25aff5..defa97d8c8 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java index cda4377c60..38a4ad3199 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java index b60402e746..9b7e032c57 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertAll; @@ -27,54 +26,73 
@@ public void type_of_ExprTextValue() { @Test public void getFields() { - var fields = Map.of( - "f1", OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), - "f2", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), - "f3", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)); + var fields = + Map.of( + "f1", OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), + "f2", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), + "f3", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)); assertEquals(fields, OpenSearchTextType.of(fields).getFields()); } @Test void non_text_types_arent_converted() { assertAll( - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(INTEGER))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(STRING))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword("field", OpenSearchDataType.of(INTEGER))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword("field", OpenSearchDataType.of(STRING))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", 
OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer))), () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", STRING)), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", INTEGER)) - ); + () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", INTEGER))); } @Test void non_text_types_with_nested_objects_arent_converted() { - var objectType = OpenSearchDataType.of(OpenSearchDataType.MappingType.Object, - Map.of("subfield", OpenSearchDataType.of(STRING))); - var arrayType = OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested, - Map.of("subfield", OpenSearchDataType.of(STRING))); + var objectType = + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Object, + Map.of("subfield", OpenSearchDataType.of(STRING))); + var arrayType = + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Nested, + Map.of("subfield", OpenSearchDataType.of(STRING))); assertAll( () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", objectType)), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", arrayType)) - ); + () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", arrayType))); } @Test void text_type_without_fields_isnt_converted() { - assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Text))); + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.Text))); } @Test void text_type_with_fields_is_converted() { - var textWithKeywordType = OpenSearchTextType.of(Map.of("keyword", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); - assertEquals("field.keyword", - OpenSearchTextType.convertTextToKeyword("field", textWithKeywordType)); + var textWithKeywordType = + OpenSearchTextType.of( + Map.of("keyword", 
OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); + assertEquals( + "field.keyword", OpenSearchTextType.convertTextToKeyword("field", textWithKeywordType)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java index 827606a961..3d3a6a5996 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertAll; @@ -88,8 +87,8 @@ class OpenSearchExprValueFactoryTest { .put("timeNoMillisOrTimeV", OpenSearchDateType.of("time_no_millis || time")) .put("dateOrOrdinalDateV", OpenSearchDateType.of("date || ordinal_date")) .put("customFormatV", OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss")) - .put("customAndEpochMillisV", - OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss || epoch_millis")) + .put( + "customAndEpochMillisV", OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss || epoch_millis")) .put("incompleteFormatV", OpenSearchDateType.of("year")) .put("boolV", OpenSearchDataType.of(BOOLEAN)) .put("structV", OpenSearchDataType.of(STRUCT)) @@ -98,20 +97,22 @@ class OpenSearchExprValueFactoryTest { .put("arrayV", OpenSearchDataType.of(ARRAY)) .put("arrayV.info", OpenSearchDataType.of(STRING)) .put("arrayV.author", OpenSearchDataType.of(STRING)) - .put("deepNestedV", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) - .put("deepNestedV.year", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) + .put( + "deepNestedV", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) + .put( + 
"deepNestedV.year", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) .put("deepNestedV.year.timeV", OpenSearchDateType.of(TIME)) - .put("nestedV", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) + .put( + "nestedV", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) .put("nestedV.count", OpenSearchDataType.of(INTEGER)) .put("textV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Text)) - .put("textKeywordV", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))) + .put( + "textKeywordV", + OpenSearchTextType.of( + Map.of("words", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))) .put("ipV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip)) .put("geoV", OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint)) .put("binaryV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary)) @@ -124,9 +125,8 @@ class OpenSearchExprValueFactoryTest { public void constructNullValue() { assertAll( () -> assertEquals(nullValue(), tupleValue("{\"intV\":null}").get("intV")), - () -> assertEquals(nullValue(), constructFromObject("intV", null)), - () -> assertTrue(new OpenSearchJsonContent(null).isNull()) - ); + () -> assertEquals(nullValue(), constructFromObject("intV", null)), + () -> assertTrue(new OpenSearchJsonContent(null).isNull())); } @Test @@ -136,8 +136,7 @@ public void iterateArrayValue() throws JsonProcessingException { assertAll( () -> assertEquals("zz", arrayIt.next().stringValue()), () -> assertEquals("bb", arrayIt.next().stringValue()), - () -> assertFalse(arrayIt.hasNext()) - ); + () -> assertFalse(arrayIt.hasNext())); } @Test @@ -146,8 +145,7 @@ public void iterateArrayValueWithOneElement() throws JsonProcessingException { var arrayIt = new OpenSearchJsonContent(mapper.readTree("[\"zz\"]")).array(); assertAll( () -> assertEquals("zz", 
arrayIt.next().stringValue()), - () -> assertFalse(arrayIt.hasNext()) - ); + () -> assertFalse(arrayIt.hasNext())); } @Test @@ -160,8 +158,7 @@ public void constructByte() { assertAll( () -> assertEquals(byteValue((byte) 1), tupleValue("{\"byteV\":1}").get("byteV")), () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", 1)), - () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", "1.0")) - ); + () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", "1.0"))); } @Test @@ -169,8 +166,7 @@ public void constructShort() { assertAll( () -> assertEquals(shortValue((short) 1), tupleValue("{\"shortV\":1}").get("shortV")), () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", 1)), - () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", "1.0")) - ); + () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", "1.0"))); } @Test @@ -178,8 +174,7 @@ public void constructInteger() { assertAll( () -> assertEquals(integerValue(1), tupleValue("{\"intV\":1}").get("intV")), () -> assertEquals(integerValue(1), constructFromObject("intV", 1)), - () -> assertEquals(integerValue(1), constructFromObject("intV", "1.0")) - ); + () -> assertEquals(integerValue(1), constructFromObject("intV", "1.0"))); } @Test @@ -192,33 +187,29 @@ public void constructLong() { assertAll( () -> assertEquals(longValue(1L), tupleValue("{\"longV\":1}").get("longV")), () -> assertEquals(longValue(1L), constructFromObject("longV", 1L)), - () -> assertEquals(longValue(1L), constructFromObject("longV", "1.0")) - ); + () -> assertEquals(longValue(1L), constructFromObject("longV", "1.0"))); } @Test public void constructFloat() { assertAll( () -> assertEquals(floatValue(1f), tupleValue("{\"floatV\":1.0}").get("floatV")), - () -> assertEquals(floatValue(1f), constructFromObject("floatV", 1f)) - ); + () -> assertEquals(floatValue(1f), constructFromObject("floatV", 1f))); } @Test public void constructDouble() { assertAll( () -> 
assertEquals(doubleValue(1d), tupleValue("{\"doubleV\":1.0}").get("doubleV")), - () -> assertEquals(doubleValue(1d), constructFromObject("doubleV", 1d)) - ); + () -> assertEquals(doubleValue(1d), constructFromObject("doubleV", 1d))); } @Test public void constructString() { assertAll( - () -> assertEquals(stringValue("text"), - tupleValue("{\"stringV\":\"text\"}").get("stringV")), - () -> assertEquals(stringValue("text"), constructFromObject("stringV", "text")) - ); + () -> + assertEquals(stringValue("text"), tupleValue("{\"stringV\":\"text\"}").get("stringV")), + () -> assertEquals(stringValue("text"), constructFromObject("stringV", "text"))); } @Test @@ -228,23 +219,25 @@ public void constructBoolean() { () -> assertEquals(booleanValue(true), constructFromObject("boolV", true)), () -> assertEquals(booleanValue(true), constructFromObject("boolV", "true")), () -> assertEquals(booleanValue(true), constructFromObject("boolV", 1)), - () -> assertEquals(booleanValue(false), constructFromObject("boolV", 0)) - ); + () -> assertEquals(booleanValue(false), constructFromObject("boolV", 0))); } @Test public void constructText() { assertAll( - () -> assertEquals(new OpenSearchExprTextValue("text"), - tupleValue("{\"textV\":\"text\"}").get("textV")), - () -> assertEquals(new OpenSearchExprTextValue("text"), - constructFromObject("textV", "text")), - - () -> assertEquals(new OpenSearchExprTextValue("text"), - tupleValue("{\"textKeywordV\":\"text\"}").get("textKeywordV")), - () -> assertEquals(new OpenSearchExprTextValue("text"), - constructFromObject("textKeywordV", "text")) - ); + () -> + assertEquals( + new OpenSearchExprTextValue("text"), + tupleValue("{\"textV\":\"text\"}").get("textV")), + () -> + assertEquals(new OpenSearchExprTextValue("text"), constructFromObject("textV", "text")), + () -> + assertEquals( + new OpenSearchExprTextValue("text"), + tupleValue("{\"textKeywordV\":\"text\"}").get("textKeywordV")), + () -> + assertEquals( + new OpenSearchExprTextValue("text"), 
constructFromObject("textKeywordV", "text"))); } @Test @@ -252,95 +245,122 @@ public void constructDates() { ExprValue dateStringV = constructFromObject("dateStringV", "1984-04-12"); assertAll( () -> assertEquals(new ExprDateValue("1984-04-12"), dateStringV), - () -> assertEquals(new ExprDateValue( - LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), UTC_ZONE_ID)), - constructFromObject("dateV", 450576000000L)), - () -> assertEquals(new ExprDateValue("1984-04-12"), - constructFromObject("dateOrOrdinalDateV", "1984-103")), - () -> assertEquals(new ExprDateValue("2015-01-01"), - tupleValue("{\"dateV\":\"2015-01-01\"}").get("dateV")) - ); + () -> + assertEquals( + new ExprDateValue( + LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), UTC_ZONE_ID)), + constructFromObject("dateV", 450576000000L)), + () -> + assertEquals( + new ExprDateValue("1984-04-12"), + constructFromObject("dateOrOrdinalDateV", "1984-103")), + () -> + assertEquals( + new ExprDateValue("2015-01-01"), + tupleValue("{\"dateV\":\"2015-01-01\"}").get("dateV"))); } @Test public void constructTimes() { - ExprValue timeStringV = constructFromObject("timeStringV","12:10:30.000Z"); + ExprValue timeStringV = constructFromObject("timeStringV", "12:10:30.000Z"); assertAll( () -> assertTrue(timeStringV.isDateTime()), () -> assertTrue(timeStringV instanceof ExprTimeValue), () -> assertEquals(new ExprTimeValue("12:10:30"), timeStringV), - () -> assertEquals(new ExprTimeValue(LocalTime.from( - Instant.ofEpochMilli(1420070400001L).atZone(UTC_ZONE_ID))), - constructFromObject("timeV", 1420070400001L)), - () -> assertEquals(new ExprTimeValue("09:07:42.000"), - constructFromObject("timeNoMillisOrTimeV", "09:07:42.000Z")), - () -> assertEquals(new ExprTimeValue("09:07:42"), - tupleValue("{\"timeV\":\"09:07:42\"}").get("timeV")) - ); + () -> + assertEquals( + new ExprTimeValue( + LocalTime.from(Instant.ofEpochMilli(1420070400001L).atZone(UTC_ZONE_ID))), + constructFromObject("timeV", 1420070400001L)), + () -> 
+ assertEquals( + new ExprTimeValue("09:07:42.000"), + constructFromObject("timeNoMillisOrTimeV", "09:07:42.000Z")), + () -> + assertEquals( + new ExprTimeValue("09:07:42"), + tupleValue("{\"timeV\":\"09:07:42\"}").get("timeV"))); } @Test public void constructDatetime() { assertAll( - () -> assertEquals( - new ExprTimestampValue("2015-01-01 00:00:00"), - tupleValue("{\"timestampV\":\"2015-01-01\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01T12:10:30Z\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01T12:10:30\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01 12:10:30\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("timestampV", 1420070400001L)), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("timestampV", Instant.ofEpochMilli(1420070400001L))), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("epochMillisV", "1420070400001")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("epochMillisV", 1420070400001L)), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochSecond(142704001L)), - constructFromObject("epochSecondV", 142704001L)), - () -> assertEquals( - new ExprTimeValue("10:20:30"), - tupleValue("{ \"timeCustomV\" : 102030 }").get("timeCustomV")), - () -> assertEquals( - new ExprDateValue("1961-04-12"), - tupleValue("{ \"dateCustomV\" : 19610412 }").get("dateCustomV")), - () -> assertEquals( - new ExprTimestampValue("1984-05-10 20:30:40"), - tupleValue("{ \"dateTimeCustomV\" : 19840510203040 }").get("dateTimeCustomV")), - () -> 
assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - constructFromObject("timestampV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeDefaultV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("dateOrEpochMillisV", "1420070400001")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 00:00:00"), + tupleValue("{\"timestampV\":\"2015-01-01\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01T12:10:30Z\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01T12:10:30\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01 12:10:30\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("timestampV", 1420070400001L)), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("timestampV", Instant.ofEpochMilli(1420070400001L))), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("epochMillisV", "1420070400001")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("epochMillisV", 1420070400001L)), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochSecond(142704001L)), + constructFromObject("epochSecondV", 142704001L)), + () -> + assertEquals( + new ExprTimeValue("10:20:30"), + tupleValue("{ \"timeCustomV\" : 102030 }").get("timeCustomV")), + () -> + 
assertEquals( + new ExprDateValue("1961-04-12"), + tupleValue("{ \"dateCustomV\" : 19610412 }").get("dateCustomV")), + () -> + assertEquals( + new ExprTimestampValue("1984-05-10 20:30:40"), + tupleValue("{ \"dateTimeCustomV\" : 19840510203040 }").get("dateTimeCustomV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + constructFromObject("timestampV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprDatetimeValue("2015-01-01 12:10:30"), + constructFromObject("datetimeV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprDatetimeValue("2015-01-01 12:10:30"), + constructFromObject("datetimeDefaultV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("dateOrEpochMillisV", "1420070400001")), // case: timestamp-formatted field, but it only gets a time: should match a time - () -> assertEquals( - new ExprTimeValue("19:36:22"), - tupleValue("{\"timestampV\":\"19:36:22\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimeValue("19:36:22"), + tupleValue("{\"timestampV\":\"19:36:22\"}").get("timestampV")), // case: timestamp-formatted field, but it only gets a date: should match a date - () -> assertEquals( - new ExprDateValue("2011-03-03"), - tupleValue("{\"timestampV\":\"2011-03-03\"}").get("timestampV")) - ); + () -> + assertEquals( + new ExprDateValue("2011-03-03"), + tupleValue("{\"timestampV\":\"2011-03-03\"}").get("timestampV"))); } @Test @@ -350,11 +370,11 @@ public void constructDatetime_fromCustomFormat() { constructFromObject("customFormatV", "2015-01-01-12-10-30")); IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("customFormatV", "2015-01-01 12-10-30")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12-10-30\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12-10-30\" failed, 
unsupported format.", exception.getMessage()); assertEquals( @@ -369,91 +389,87 @@ public void constructDatetime_fromCustomFormat() { @Test public void constructDatetimeFromUnsupportedFormat_ThrowIllegalArgumentException() { IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timestampV", "2015-01-01 12:10")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, unsupported format.", exception.getMessage()); // fail with missing seconds exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateOrEpochMillisV", "2015-01-01 12:10")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, unsupported format.", exception.getMessage()); } @Test public void constructTimeFromUnsupportedFormat_ThrowIllegalArgumentException() { - IllegalArgumentException exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("timeV", "2015-01-01")); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timeV", "2015-01-01")); assertEquals( - "Construct TIME from \"2015-01-01\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct TIME from \"2015-01-01\" failed, unsupported format.", exception.getMessage()); - exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("timeStringV", "10:10")); + exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timeStringV", "10:10")); assertEquals( - "Construct TIME from \"10:10\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct TIME from \"10:10\" failed, 
unsupported format.", exception.getMessage()); } @Test public void constructDateFromUnsupportedFormat_ThrowIllegalArgumentException() { - IllegalArgumentException exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("dateV", "12:10:10")); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateV", "12:10:10")); assertEquals( - "Construct DATE from \"12:10:10\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct DATE from \"12:10:10\" failed, unsupported format.", exception.getMessage()); - exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("dateStringV", "abc")); - assertEquals( - "Construct DATE from \"abc\" failed, " - + "unsupported format.", - exception.getMessage()); + exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateStringV", "abc")); + assertEquals("Construct DATE from \"abc\" failed, unsupported format.", exception.getMessage()); } @Test public void constructDateFromIncompleteFormat() { - assertEquals( - new ExprDateValue("1984-01-01"), - constructFromObject("incompleteFormatV", "1984")); + assertEquals(new ExprDateValue("1984-01-01"), constructFromObject("incompleteFormatV", "1984")); } @Test public void constructArray() { assertEquals( - new ExprCollectionValue(List.of(new ExprTupleValue( - new LinkedHashMap() { - { - put("info", stringValue("zz")); - put("author", stringValue("au")); - } - }))), + new ExprCollectionValue( + List.of( + new ExprTupleValue( + new LinkedHashMap() { + { + put("info", stringValue("zz")); + put("author", stringValue("au")); + } + }))), tupleValue("{\"arrayV\":[{\"info\":\"zz\",\"author\":\"au\"}]}").get("arrayV")); assertEquals( - new ExprCollectionValue(List.of(new ExprTupleValue( - new LinkedHashMap() { - { - put("info", stringValue("zz")); - put("author", stringValue("au")); - } - }))), - constructFromObject("arrayV", List.of( 
- ImmutableMap.of("info", "zz", "author", "au")))); + new ExprCollectionValue( + List.of( + new ExprTupleValue( + new LinkedHashMap() { + { + put("info", stringValue("zz")); + put("author", stringValue("au")); + } + }))), + constructFromObject("arrayV", List.of(ImmutableMap.of("info", "zz", "author", "au")))); } @Test public void constructArrayOfStrings() { - assertEquals(new ExprCollectionValue( - List.of(stringValue("zz"), stringValue("au"))), + assertEquals( + new ExprCollectionValue(List.of(stringValue("zz"), stringValue("au"))), constructFromObject("arrayV", List.of("zz", "au"))); } @@ -461,100 +477,71 @@ public void constructArrayOfStrings() { public void constructNestedArraysOfStrings() { assertEquals( new ExprCollectionValue( - List.of( - collectionValue( - List.of("zz", "au") - ), - collectionValue( - List.of("ss") - ) - ) - ), - tupleValueWithArraySupport( - "{\"stringV\":[" - + "[\"zz\", \"au\"]," - + "[\"ss\"]" - + "]}" - ).get("stringV")); + List.of(collectionValue(List.of("zz", "au")), collectionValue(List.of("ss")))), + tupleValueWithArraySupport("{\"stringV\":[ [\"zz\", \"au\"], [\"ss\"] ]}").get("stringV")); } @Test public void constructNestedArraysOfStringsReturnsFirstIndex() { assertEquals( - stringValue("zz"), - tupleValue( - "{\"stringV\":[" - + "[\"zz\", \"au\"]," - + "[\"ss\"]" - + "]}" - ).get("stringV")); + stringValue("zz"), tupleValue("{\"stringV\":[[\"zz\", \"au\"],[\"ss\"]]}").get("stringV")); } @Test public void constructMultiNestedArraysOfStringsReturnsFirstIndex() { assertEquals( stringValue("z"), - tupleValue( - "{\"stringV\":" - + "[\"z\"," - + "[\"s\"]," - + "[\"zz\", \"au\"]" - + "]}" - ).get("stringV")); + tupleValue("{\"stringV\":" + "[\"z\",[\"s\"],[\"zz\", \"au\"]]}").get("stringV")); } @Test public void constructArrayOfInts() { - assertEquals(new ExprCollectionValue( - List.of(integerValue(1), integerValue(2))), + assertEquals( + new ExprCollectionValue(List.of(integerValue(1), integerValue(2))), 
constructFromObject("arrayV", List.of(1, 2))); } @Test public void constructArrayOfShorts() { // Shorts are treated same as integer - assertEquals(new ExprCollectionValue( - List.of(shortValue((short)3), shortValue((short)4))), + assertEquals( + new ExprCollectionValue(List.of(shortValue((short) 3), shortValue((short) 4))), constructFromObject("arrayV", List.of(3, 4))); } @Test public void constructArrayOfLongs() { - assertEquals(new ExprCollectionValue( - List.of(longValue(123456789L), longValue(987654321L))), + assertEquals( + new ExprCollectionValue(List.of(longValue(123456789L), longValue(987654321L))), constructFromObject("arrayV", List.of(123456789L, 987654321L))); } @Test public void constructArrayOfFloats() { - assertEquals(new ExprCollectionValue( - List.of(floatValue(3.14f), floatValue(4.13f))), + assertEquals( + new ExprCollectionValue(List.of(floatValue(3.14f), floatValue(4.13f))), constructFromObject("arrayV", List.of(3.14f, 4.13f))); } @Test public void constructArrayOfDoubles() { - assertEquals(new ExprCollectionValue( - List.of(doubleValue(9.1928374756D), doubleValue(4.987654321D))), + assertEquals( + new ExprCollectionValue(List.of(doubleValue(9.1928374756D), doubleValue(4.987654321D))), constructFromObject("arrayV", List.of(9.1928374756D, 4.987654321D))); } @Test public void constructArrayOfBooleans() { - assertEquals(new ExprCollectionValue( - List.of(booleanValue(true), booleanValue(false))), + assertEquals( + new ExprCollectionValue(List.of(booleanValue(true), booleanValue(false))), constructFromObject("arrayV", List.of(true, false))); } @Test public void constructNestedObjectArrayNode() { - assertEquals(collectionValue( - List.of( - Map.of("count", 1), - Map.of("count", 2) - )), - tupleValueWithArraySupport("{\"nestedV\":[{\"count\":1},{\"count\":2}]}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(Map.of("count", 1), Map.of("count", 2))), + 
tupleValueWithArraySupport("{\"nestedV\":[{\"count\":1},{\"count\":2}]}").get("nestedV")); } @Test @@ -562,84 +549,70 @@ public void constructNestedObjectArrayOfObjectArraysNode() { assertEquals( collectionValue( List.of( - Map.of("year", + Map.of( + "year", List.of( Map.of("timeV", new ExprTimeValue("09:07:42")), - Map.of("timeV", new ExprTimeValue("09:07:42")) - ) - ), - Map.of("year", + Map.of("timeV", new ExprTimeValue("09:07:42")))), + Map.of( + "year", List.of( Map.of("timeV", new ExprTimeValue("09:07:42")), - Map.of("timeV", new ExprTimeValue("09:07:42")) - ) - ) - ) - ), + Map.of("timeV", new ExprTimeValue("09:07:42")))))), tupleValueWithArraySupport( - "{\"deepNestedV\":" - + "[" - + "{\"year\":" - + "[" - + "{\"timeV\":\"09:07:42\"}," - + "{\"timeV\":\"09:07:42\"}" - + "]" - + "}," - + "{\"year\":" - + "[" - + "{\"timeV\":\"09:07:42\"}," - + "{\"timeV\":\"09:07:42\"}" - + "]" - + "}" - + "]" - + "}") + "{\"deepNestedV\":" + + " [" + + " {\"year\":" + + " [" + + " {\"timeV\":\"09:07:42\"}," + + " {\"timeV\":\"09:07:42\"}" + + " ]" + + " }," + + " {\"year\":" + + " [" + + " {\"timeV\":\"09:07:42\"}," + + " {\"timeV\":\"09:07:42\"}" + + " ]" + + " }" + + " ]" + + "}") .get("deepNestedV")); } @Test public void constructNestedArrayNode() { - assertEquals(collectionValue( - List.of( - 1969, - 2011 - )), - tupleValueWithArraySupport("{\"nestedV\":[1969,2011]}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(1969, 2011)), + tupleValueWithArraySupport("{\"nestedV\":[1969,2011]}").get("nestedV")); } @Test public void constructNestedObjectNode() { - assertEquals(collectionValue( - List.of( - Map.of("count", 1969) - )), - tupleValue("{\"nestedV\":{\"count\":1969}}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(Map.of("count", 1969))), + tupleValue("{\"nestedV\":{\"count\":1969}}").get("nestedV")); } @Test public void constructArrayOfGeoPoints() { - assertEquals(new ExprCollectionValue( + assertEquals( + new ExprCollectionValue( 
List.of( new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), - new OpenSearchExprGeoPointValue(-33.6123556, 66.287449)) - ), + new OpenSearchExprGeoPointValue(-33.6123556, 66.287449))), tupleValueWithArraySupport( - "{\"geoV\":[" - + "{\"lat\":42.60355556,\"lon\":-97.25263889}," - + "{\"lat\":-33.6123556,\"lon\":66.287449}" - + "]}" - ).get("geoV") - ); + "{\"geoV\":[" + + "{\"lat\":42.60355556,\"lon\":-97.25263889}," + + "{\"lat\":-33.6123556,\"lon\":66.287449}" + + "]}") + .get("geoV")); } @Test public void constructArrayOfIPsReturnsFirstIndex() { assertEquals( new OpenSearchExprIpValue("192.168.0.1"), - tupleValue("{\"ipV\":[\"192.168.0.1\",\"192.168.0.2\"]}") - .get("ipV") - ); + tupleValue("{\"ipV\":[\"192.168.0.1\",\"192.168.0.2\"]}").get("ipV")); } @Test @@ -647,8 +620,7 @@ public void constructBinaryArrayReturnsFirstIndex() { assertEquals( new OpenSearchExprBinaryValue("U29tZSBiaWsdfsdfgYmxvYg=="), tupleValue("{\"binaryV\":[\"U29tZSBiaWsdfsdfgYmxvYg==\",\"U987yuhjjiy8jhk9vY+98jjdf\"]}") - .get("binaryV") - ); + .get("binaryV")); } @Test @@ -656,26 +628,21 @@ public void constructArrayOfCustomEpochMillisReturnsFirstIndex() { assertEquals( new ExprDatetimeValue("2015-01-01 12:10:30"), tupleValue("{\"customAndEpochMillisV\":[\"2015-01-01 12:10:30\",\"1999-11-09 01:09:44\"]}") - .get("customAndEpochMillisV") - ); + .get("customAndEpochMillisV")); } @Test public void constructArrayOfDateStringsReturnsFirstIndex() { assertEquals( new ExprDateValue("1984-04-12"), - tupleValue("{\"dateStringV\":[\"1984-04-12\",\"2033-05-03\"]}") - .get("dateStringV") - ); + tupleValue("{\"dateStringV\":[\"1984-04-12\",\"2033-05-03\"]}").get("dateStringV")); } @Test public void constructArrayOfTimeStringsReturnsFirstIndex() { assertEquals( new ExprTimeValue("12:10:30"), - tupleValue("{\"timeStringV\":[\"12:10:30.000Z\",\"18:33:55.000Z\"]}") - .get("timeStringV") - ); + tupleValue("{\"timeStringV\":[\"12:10:30.000Z\",\"18:33:55.000Z\"]}").get("timeStringV")); } @Test @@ 
-683,8 +650,7 @@ public void constructArrayOfEpochMillis() { assertEquals( new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), tupleValue("{\"dateOrEpochMillisV\":[\"1420070400001\",\"1454251113333\"]}") - .get("dateOrEpochMillisV") - ); + .get("dateOrEpochMillisV")); } @Test @@ -711,54 +677,64 @@ public void constructStruct() { @Test public void constructIP() { - assertEquals(new OpenSearchExprIpValue("192.168.0.1"), + assertEquals( + new OpenSearchExprIpValue("192.168.0.1"), tupleValue("{\"ipV\":\"192.168.0.1\"}").get("ipV")); } @Test public void constructGeoPoint() { - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), tupleValue("{\"geoV\":{\"lat\":42.60355556,\"lon\":-97.25263889}}").get("geoV")); - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), tupleValue("{\"geoV\":{\"lat\":\"42.60355556\",\"lon\":\"-97.25263889\"}}").get("geoV")); - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), constructFromObject("geoV", "42.60355556,-97.25263889")); } @Test public void constructGeoPointFromUnsupportedFormatShouldThrowException() { IllegalStateException exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":[42.60355556,-97.25263889]}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lon\":-97.25263889}}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": 
number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":-97.25263889}}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":true,\"lon\":-97.25263889}}").get("geoV")); assertEquals("latitude must be number value, but got value: true", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":42.60355556,\"lon\":false}}").get("geoV")); assertEquals("longitude must be number value, but got value: false", exception.getMessage()); } @Test public void constructBinary() { - assertEquals(new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="), + assertEquals( + new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="), tupleValue("{\"binaryV\":\"U29tZSBiaW5hcnkgYmxvYg==\"}").get("binaryV")); } @@ -769,14 +745,16 @@ public void constructBinary() { @Test public void constructFromOpenSearchArrayReturnFirstElement() { assertEquals(integerValue(1), tupleValue("{\"intV\":[1, 2, 3]}").get("intV")); - assertEquals(new ExprTupleValue( - new LinkedHashMap() { - { - put("id", integerValue(1)); - put("state", stringValue("WA")); - } - }), tupleValue("{\"structV\":[{\"id\":1,\"state\":\"WA\"},{\"id\":2,\"state\":\"CA\"}]}}") - .get("structV")); + assertEquals( + new ExprTupleValue( + new LinkedHashMap() { + { + put("id", integerValue(1)); + put("state", stringValue("WA")); + } + }), + 
tupleValue("{\"structV\":[{\"id\":1,\"state\":\"WA\"},{\"id\":2,\"state\":\"CA\"}]}}") + .get("structV")); } @Test @@ -799,19 +777,13 @@ public void constructUnsupportedTypeThrowException() { new OpenSearchExprValueFactory(Map.of("type", new TestType())); IllegalStateException exception = assertThrows( - IllegalStateException.class, - () -> exprValueFactory.construct("{\"type\":1}", false) - ); + IllegalStateException.class, () -> exprValueFactory.construct("{\"type\":1}", false)); assertEquals("Unsupported type: TEST_TYPE for value: 1.", exception.getMessage()); exception = assertThrows( - IllegalStateException.class, - () -> exprValueFactory.construct("type", 1, false) - ); - assertEquals( - "Unsupported type: TEST_TYPE for value: 1.", - exception.getMessage()); + IllegalStateException.class, () -> exprValueFactory.construct("type", 1, false)); + assertEquals("Unsupported type: TEST_TYPE for value: 1.", exception.getMessage()); } @Test @@ -820,21 +792,21 @@ public void constructUnsupportedTypeThrowException() { public void factoryMappingsAreExtendableWithoutOverWrite() throws NoSuchFieldException, IllegalAccessException { var factory = new OpenSearchExprValueFactory(Map.of("value", OpenSearchDataType.of(INTEGER))); - factory.extendTypeMapping(Map.of( - "value", OpenSearchDataType.of(DOUBLE), - "agg", OpenSearchDataType.of(DATE))); + factory.extendTypeMapping( + Map.of( + "value", OpenSearchDataType.of(DOUBLE), + "agg", OpenSearchDataType.of(DATE))); // extract private field for testing purposes var field = factory.getClass().getDeclaredField("typeMapping"); field.setAccessible(true); @SuppressWarnings("unchecked") - var mapping = (Map)field.get(factory); + var mapping = (Map) field.get(factory); assertAll( () -> assertEquals(2, mapping.size()), () -> assertTrue(mapping.containsKey("value")), () -> assertTrue(mapping.containsKey("agg")), () -> assertEquals(OpenSearchDataType.of(INTEGER), mapping.get("value")), - () -> assertEquals(OpenSearchDataType.of(DATE), 
mapping.get("agg")) - ); + () -> assertEquals(OpenSearchDataType.of(DATE), mapping.get("agg"))); } public Map tupleValue(String jsonString) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java index 6d2b9b13ce..047a510180 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java @@ -32,17 +32,13 @@ @ExtendWith(MockitoExtension.class) class OpenSearchQueryManagerTest { - @Mock - private QueryId queryId; + @Mock private QueryId queryId; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; - @Mock - private UnresolvedPlan plan; + @Mock private UnresolvedPlan plan; - @Mock - private ResponseListener listener; + @Mock private ResponseListener listener; @Test public void submitQuery() { @@ -51,19 +47,20 @@ public void submitQuery() { when(nodeClient.threadPool()).thenReturn(threadPool); AtomicBoolean isRun = new AtomicBoolean(false); - AbstractPlan queryPlan = new QueryPlan(queryId, plan, queryService, listener) { - @Override - public void execute() { - isRun.set(true); - } - }; + AbstractPlan queryPlan = + new QueryPlan(queryId, plan, queryService, listener) { + @Override + public void execute() { + isRun.set(true); + } + }; doAnswer( - invocation -> { - Runnable task = invocation.getArgument(0); - task.run(); - return null; - }) + invocation -> { + Runnable task = invocation.getArgument(0); + task.run(); + return null; + }) .when(threadPool) .schedule(any(), any(), any()); new OpenSearchQueryManager(nodeClient).submit(queryPlan); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java 
index 96e85a8173..26bcdf6d89 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -27,17 +26,13 @@ @ExtendWith(MockitoExtension.class) class ResourceMonitorPlanTest { - @Mock - private PhysicalPlan plan; + @Mock private PhysicalPlan plan; - @Mock - private ResourceMonitor resourceMonitor; + @Mock private ResourceMonitor resourceMonitor; - @Mock - private PhysicalPlanNodeVisitor visitor; + @Mock private PhysicalPlanNodeVisitor visitor; - @Mock - private Object context; + @Mock private Object context; private ResourceMonitorPlan monitorPlan; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java index af4cdc8ce6..a61f7343e6 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -20,11 +19,9 @@ @ExtendWith(MockitoExtension.class) class OpenSearchMemoryHealthyTest { - @Mock - private OpenSearchMemoryHealthy.RandomFail randomFail; + @Mock private OpenSearchMemoryHealthy.RandomFail randomFail; - @Mock - private OpenSearchMemoryHealthy.MemoryUsage memoryUsage; + @Mock private OpenSearchMemoryHealthy.MemoryUsage memoryUsage; private OpenSearchMemoryHealthy monitor; @@ -45,7 +42,8 @@ void memoryUsageExceedLimitFastFailure() { when(memoryUsage.usage()).thenReturn(10L); 
when(randomFail.shouldFail()).thenReturn(true); - assertThrows(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class, + assertThrows( + OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class, () -> monitor.isMemoryHealthy(9L)); } @@ -54,7 +52,8 @@ void memoryUsageExceedLimitWithoutFastFailure() { when(memoryUsage.usage()).thenReturn(10L); when(randomFail.shouldFail()).thenReturn(false); - assertThrows(OpenSearchMemoryHealthy.MemoryUsageExceedException.class, + assertThrows( + OpenSearchMemoryHealthy.MemoryUsageExceedException.class, () -> monitor.isMemoryHealthy(9L)); } @@ -72,8 +71,7 @@ void randomFail() { @Test void setMemoryUsage() { - OpenSearchMemoryHealthy.MemoryUsage usage = - new OpenSearchMemoryHealthy.MemoryUsage(); + OpenSearchMemoryHealthy.MemoryUsage usage = new OpenSearchMemoryHealthy.MemoryUsage(); assertTrue(usage.usage() > 0); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java index cd27b0710e..f56d8cb81b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -24,11 +23,9 @@ @ExtendWith(MockitoExtension.class) class OpenSearchResourceMonitorTest { - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private OpenSearchMemoryHealthy memoryMonitor; + @Mock private OpenSearchMemoryHealthy memoryMonitor; @BeforeEach public void setup() { @@ -47,8 +44,8 @@ void isHealthy() { @Test void notHealthyFastFailure() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - 
OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); @@ -58,8 +55,8 @@ void notHealthyFastFailure() { @Test void notHealthyWithRetry() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - OpenSearchMemoryHealthy.MemoryUsageExceedException.class); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedException.class); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); @@ -70,8 +67,9 @@ void notHealthyWithRetry() { @Test void healthyWithRetry() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - OpenSearchMemoryHealthy.MemoryUsageExceedException.class).thenReturn(true); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedException.class) + .thenReturn(true); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java index b6966f2403..d2bc5b0641 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -41,32 +40,23 @@ @ExtendWith(MockitoExtension.class) public class OpenSearchQueryRequestTest { - @Mock - private Function searchAction; + @Mock private Function searchAction; - @Mock - private 
Function scrollAction; + @Mock private Function scrollAction; - @Mock - private Consumer cleanAction; + @Mock private Consumer cleanAction; - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private SearchHits searchHits; + @Mock private SearchHits searchHits; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private SearchSourceBuilder sourceBuilder; + @Mock private SearchSourceBuilder sourceBuilder; - @Mock - private FetchSourceContext fetchSourceContext; + @Mock private FetchSourceContext fetchSourceContext; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; private final OpenSearchQueryRequest request = new OpenSearchQueryRequest("test", 200, factory, List.of()); @@ -76,12 +66,9 @@ public class OpenSearchQueryRequestTest { @Test void search() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); @@ -96,12 +83,9 @@ void search() { @Test void search_withoutContext() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); @@ -113,12 +97,9 @@ void search_withoutContext() { @Test void search_withIncludes() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - 
sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); String[] includes = {"_id", "_index"}; when(searchAction.apply(any())).thenReturn(searchResponse); @@ -144,13 +125,15 @@ void clean() { void searchRequest() { request.getSourceBuilder().query(QueryBuilders.termQuery("name", "John")); - assertSearchRequest(new SearchRequest() - .indices("test") - .source(new SearchSourceBuilder() - .timeout(DEFAULT_QUERY_TIMEOUT) - .from(0) - .size(200) - .query(QueryBuilders.termQuery("name", "John"))), + assertSearchRequest( + new SearchRequest() + .indices("test") + .source( + new SearchSourceBuilder() + .timeout(DEFAULT_QUERY_TIMEOUT) + .from(0) + .size(200) + .query(QueryBuilders.termQuery("name", "John"))), request); } @@ -161,28 +144,31 @@ void searchCrossClusterRequest() { assertSearchRequest( new SearchRequest() .indices("ccs:test") - .source(new SearchSourceBuilder() - .timeout(DEFAULT_QUERY_TIMEOUT) - .from(0) - .size(200) - .query(QueryBuilders.termQuery("name", "John"))), + .source( + new SearchSourceBuilder() + .timeout(DEFAULT_QUERY_TIMEOUT) + .from(0) + .size(200) + .query(QueryBuilders.termQuery("name", "John"))), remoteRequest); } @Test void writeTo_unsupported() { - assertThrows(UnsupportedOperationException.class, - () -> request.writeTo(mock(StreamOutput.class))); + assertThrows( + UnsupportedOperationException.class, () -> request.writeTo(mock(StreamOutput.class))); } private void assertSearchRequest(SearchRequest expected, OpenSearchQueryRequest request) { - Function querySearch = searchRequest -> { - assertEquals(expected, searchRequest); - return when(mock(SearchResponse.class).getHits()) - .thenReturn(new SearchHits(new SearchHit[0], - new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) - .getMock(); - }; + Function querySearch = + searchRequest -> { + assertEquals(expected, searchRequest); + return 
when(mock(SearchResponse.class).getHits()) + .thenReturn( + new SearchHits( + new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) + .getMock(); + }; request.search(querySearch, searchScrollRequest -> null); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java index 483ea1290e..5bb0a2207b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.Assert.assertThrows; @@ -71,11 +70,10 @@ class OpenSearchRequestBuilderTest { private static final Integer DEFAULT_LIMIT = 200; private static final Integer MAX_RESULT_WINDOW = 500; - private static final OpenSearchRequest.IndexName indexName - = new OpenSearchRequest.IndexName("test"); + private static final OpenSearchRequest.IndexName indexName = + new OpenSearchRequest.IndexName("test"); - @Mock - private OpenSearchExprValueFactory exprValueFactory; + @Mock private OpenSearchExprValueFactory exprValueFactory; private OpenSearchRequestBuilder requestBuilder; @@ -99,7 +97,8 @@ void build_query_request() { .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) .trackScores(true), - exprValueFactory, List.of()), + exprValueFactory, + List.of()), requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @@ -111,12 +110,14 @@ void build_scroll_request_with_correct_size() { assertEquals( new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), new SearchSourceBuilder() .from(offset) .size(MAX_RESULT_WINDOW - offset) 
.timeout(DEFAULT_QUERY_TIMEOUT), - exprValueFactory, List.of()), + exprValueFactory, + List.of()), requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @@ -126,33 +127,32 @@ void test_push_down_query() { requestBuilder.pushDownFilter(query); var r = requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT); - Function querySearch = searchRequest -> { - assertEquals( - new SearchSourceBuilder() - .from(DEFAULT_OFFSET) - .size(DEFAULT_LIMIT) - .timeout(DEFAULT_QUERY_TIMEOUT) - .query(query) - .sort(DOC_FIELD_NAME, ASC), - searchRequest.source() - ); - return mock(); - }; - Function scrollSearch = searchScrollRequest -> { - throw new UnsupportedOperationException(); - }; + Function querySearch = + searchRequest -> { + assertEquals( + new SearchSourceBuilder() + .from(DEFAULT_OFFSET) + .size(DEFAULT_LIMIT) + .timeout(DEFAULT_QUERY_TIMEOUT) + .query(query) + .sort(DOC_FIELD_NAME, ASC), + searchRequest.source()); + return mock(); + }; + Function scrollSearch = + searchScrollRequest -> { + throw new UnsupportedOperationException(); + }; r.search(querySearch, scrollSearch); - } @Test void test_push_down_aggregation() { - AggregationBuilder aggBuilder = AggregationBuilders.composite( - "composite_buckets", - Collections.singletonList(new TermsValuesSourceBuilder("longA"))); + AggregationBuilder aggBuilder = + AggregationBuilders.composite( + "composite_buckets", Collections.singletonList(new TermsValuesSourceBuilder("longA"))); OpenSearchAggregationResponseParser responseParser = - new CompositeAggregationParser( - new SingleValueParser("AVG(intA)")); + new CompositeAggregationParser(new SingleValueParser("AVG(intA)")); requestBuilder.pushDownAggregation(Pair.of(List.of(aggBuilder), responseParser)); assertEquals( @@ -161,8 +161,7 @@ void test_push_down_aggregation() { .size(0) .timeout(DEFAULT_QUERY_TIMEOUT) .aggregation(aggBuilder), - requestBuilder.getSourceBuilder() - ); + requestBuilder.getSourceBuilder()); 
verify(exprValueFactory).setParser(responseParser); } @@ -184,21 +183,25 @@ void test_push_down_query_and_sort() { requestBuilder); } - void assertSearchSourceBuilder(SearchSourceBuilder expected, - OpenSearchRequestBuilder requestBuilder) + void assertSearchSourceBuilder( + SearchSourceBuilder expected, OpenSearchRequestBuilder requestBuilder) throws UnsupportedOperationException { - Function querySearch = searchRequest -> { - assertEquals(expected, searchRequest.source()); - return when(mock(SearchResponse.class).getHits()) - .thenReturn(new SearchHits(new SearchHit[0], new TotalHits(0, - TotalHits.Relation.EQUAL_TO), 0.0f)) - .getMock(); - }; - Function scrollSearch = searchScrollRequest -> { - throw new UnsupportedOperationException(); - }; - requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT).search( - querySearch, scrollSearch); + Function querySearch = + searchRequest -> { + assertEquals(expected, searchRequest.source()); + return when(mock(SearchResponse.class).getHits()) + .thenReturn( + new SearchHits( + new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) + .getMock(); + }; + Function scrollSearch = + searchScrollRequest -> { + throw new UnsupportedOperationException(); + }; + requestBuilder + .build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT) + .search(querySearch, scrollSearch); } @Test @@ -231,9 +234,8 @@ void test_push_down_non_field_sort() { @Test void test_push_down_multiple_sort() { - requestBuilder.pushDownSort(List.of( - SortBuilders.fieldSort("intA"), - SortBuilders.fieldSort("intB"))); + requestBuilder.pushDownSort( + List.of(SortBuilders.fieldSort("intA"), SortBuilders.fieldSort("intB"))); assertSearchSourceBuilder( new SearchSourceBuilder() @@ -255,7 +257,7 @@ void test_push_down_project() { .from(DEFAULT_OFFSET) .size(DEFAULT_LIMIT) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); 
assertEquals( @@ -285,7 +287,7 @@ void test_push_down_project_limit() { .from(offset) .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); assertEquals( @@ -315,7 +317,7 @@ void test_push_down_project_limit_and_offset() { .from(offset) .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); assertEquals( @@ -333,24 +335,25 @@ void test_push_down_project_limit_and_offset() { @Test void test_push_down_nested() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() @@ -363,28 +366,29 @@ void test_push_down_nested() { @Test void test_push_down_multiple_nested_with_same_path() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", 
new ReferenceExpression("message", STRING) - ), - Map.of( - "field", new ReferenceExpression("message.from", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING)), + Map.of( + "field", new ReferenceExpression("message.from", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null), - new NamedExpression("message.from", DSL.nested(DSL.ref("message.from", STRING)), null) - ); + new NamedExpression("message.from", DSL.nested(DSL.ref("message.from", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info", "message.from"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext( + true, new String[] {"message.info", "message.from"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() .query(QueryBuilders.boolQuery().filter(QueryBuilders.boolQuery().must(nestedQuery))) @@ -396,35 +400,35 @@ void test_push_down_multiple_nested_with_same_path() { @Test void test_push_down_nested_with_filter() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", 
DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.getSourceBuilder().query(QueryBuilders.rangeQuery("myNum").gt(3)); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() .query( - QueryBuilders.boolQuery().filter( - QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("myNum").gt(3)) - .must(nestedQuery) - ) - ) + QueryBuilders.boolQuery() + .filter( + QueryBuilders.boolQuery() + .must(QueryBuilders.rangeQuery("myNum").gt(3)) + .must(nestedQuery))) .from(DEFAULT_OFFSET) .size(DEFAULT_LIMIT) .timeout(DEFAULT_QUERY_TIMEOUT), @@ -433,17 +437,15 @@ void test_push_down_nested_with_filter() { @Test void testPushDownNestedWithNestedFilter() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); QueryBuilder innerFilterQuery = QueryBuilders.rangeQuery("myNum").gt(3); QueryBuilder filterQuery = @@ -452,20 +454,20 @@ void 
testPushDownNestedWithNestedFilter() { requestBuilder.getSourceBuilder().query(filterQuery); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); - - assertSearchSourceBuilder(new SearchSourceBuilder() - .query( - QueryBuilders.boolQuery().filter( - QueryBuilders.boolQuery() - .must(filterQuery) - ) - ) - .from(DEFAULT_OFFSET) - .size(DEFAULT_LIMIT) - .timeout(DEFAULT_QUERY_TIMEOUT), requestBuilder); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); + + assertSearchSourceBuilder( + new SearchSourceBuilder() + .query(QueryBuilders.boolQuery().filter(QueryBuilders.boolQuery().must(filterQuery))) + .from(DEFAULT_OFFSET) + .size(DEFAULT_LIMIT) + .timeout(DEFAULT_QUERY_TIMEOUT), + requestBuilder); } @Test @@ -479,8 +481,9 @@ void test_push_type_mapping() { @Test void push_down_highlight_with_repeating_fields() { requestBuilder.pushDownHighlight("name", Map.of()); - var exception = assertThrows(SemanticCheckException.class, () -> - requestBuilder.pushDownHighlight("name", Map.of())); + var exception = + assertThrows( + SemanticCheckException.class, () -> requestBuilder.pushDownHighlight("name", Map.of())); assertEquals("Duplicate field name in highlight", exception.getMessage()); } @@ -488,10 +491,7 @@ void push_down_highlight_with_repeating_fields() { void push_down_page_size() { requestBuilder.pushDownPageSize(3); assertSearchSourceBuilder( - new SearchSourceBuilder() - .from(DEFAULT_OFFSET) - .size(3) - .timeout(DEFAULT_QUERY_TIMEOUT), + new SearchSourceBuilder().from(DEFAULT_OFFSET).size(3).timeout(DEFAULT_QUERY_TIMEOUT), requestBuilder); } @@ -499,7 +499,8 @@ void 
push_down_page_size() { void exception_when_non_zero_offset_and_page_size() { requestBuilder.pushDownPageSize(3); requestBuilder.pushDownLimit(300, 2); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java index 4b9233dbc1..66cb6bf14c 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.jupiter.api.Assertions.assertAll; @@ -49,50 +48,48 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchScrollRequestTest { - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); public static final TimeValue SCROLL_TIMEOUT = TimeValue.timeValueMinutes(1); - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private SearchHits searchHits; + @Mock private SearchHits searchHits; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private SearchSourceBuilder sourceBuilder; + @Mock private SearchSourceBuilder sourceBuilder; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; private final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - private final OpenSearchScrollRequest request = new OpenSearchScrollRequest( - 
INDEX_NAME, SCROLL_TIMEOUT, - searchSourceBuilder, factory, List.of()); + private final OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + INDEX_NAME, SCROLL_TIMEOUT, searchSourceBuilder, factory, List.of()); @Test void constructor() { - var request = new OpenSearchScrollRequest(INDEX_NAME, SCROLL_TIMEOUT, - searchSourceBuilder, factory, List.of("test")); + var request = + new OpenSearchScrollRequest( + INDEX_NAME, SCROLL_TIMEOUT, searchSourceBuilder, factory, List.of("test")); assertEquals(List.of("test"), request.getIncludes()); } @Test void searchRequest() { searchSourceBuilder.query(QueryBuilders.termQuery("name", "John")); - request.search(searchRequest -> { - assertEquals( - new SearchRequest() - .indices("test") - .scroll(TimeValue.timeValueMinutes(1)) - .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "John"))), - searchRequest); - SearchHits searchHitsMock = when(mock(SearchHits.class).getHits()) - .thenReturn(new SearchHit[0]).getMock(); - return when(mock(SearchResponse.class).getHits()).thenReturn(searchHitsMock).getMock(); - }, searchScrollRequest -> null); + request.search( + searchRequest -> { + assertEquals( + new SearchRequest() + .indices("test") + .scroll(TimeValue.timeValueMinutes(1)) + .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "John"))), + searchRequest); + SearchHits searchHitsMock = + when(mock(SearchHits.class).getHits()).thenReturn(new SearchHit[0]).getMock(); + return when(mock(SearchResponse.class).getHits()).thenReturn(searchHitsMock).getMock(); + }, + searchScrollRequest -> null); } @Test @@ -110,21 +107,19 @@ void isScrollStarted() { void scrollRequest() { request.setScrollId("scroll123"); assertEquals( - new SearchScrollRequest() - .scroll(TimeValue.timeValueMinutes(1)) - .scrollId("scroll123"), + new SearchScrollRequest().scroll(TimeValue.timeValueMinutes(1)).scrollId("scroll123"), request.scrollRequest()); } @Test void search() { - OpenSearchScrollRequest request = 
new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -135,13 +130,13 @@ void search() { @Test void search_without_context() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -154,13 +149,13 @@ void search_without_context() { @SneakyThrows void search_without_scroll_and_initial_request_should_throw() { // Steps: serialize a not used request, deserialize it, then use - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); var outStream = new BytesStreamOutput(); request.writeTo(outStream); outStream.flush(); @@ -172,20 +167,21 @@ void search_without_scroll_and_initial_request_should_throw() { assertAll( () -> assertFalse(request2.isScroll()), () -> assertNull(request2.getInitialSearchRequest()), - () -> assertThrows(UnsupportedOperationException.class, - () -> request2.search(sr -> fail("search"), sr -> 
fail("scroll"))) - ); + () -> + assertThrows( + UnsupportedOperationException.class, + () -> request2.search(sr -> fail("search"), sr -> fail("scroll")))); } @Test void search_withoutIncludes() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -213,9 +209,10 @@ void clean_on_empty_response() { // This could happen on sequential search calls SearchResponse searchResponse = mock(); when(searchResponse.getScrollId()).thenReturn("scroll1", "scroll2"); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F), - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F), + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); request.search((x) -> searchResponse, (x) -> searchResponse); assertEquals("scroll1", request.getScrollId()); @@ -233,8 +230,9 @@ void clean_on_empty_response() { void no_clean_on_non_empty_response() { SearchResponse searchResponse = mock(); when(searchResponse.getScrollId()).thenReturn("scroll"); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F)); request.search((sr) -> searchResponse, (sr) -> searchResponse); assertEquals("scroll", 
request.getScrollId()); @@ -246,8 +244,7 @@ void no_clean_on_non_empty_response() { @Test void no_cursor_on_empty_response() { SearchResponse searchResponse = mock(); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], null, 1f)); + when(searchResponse.getHits()).thenReturn(new SearchHits(new SearchHit[0], null, 1f)); request.search((x) -> searchResponse, (x) -> searchResponse); assertFalse(request.hasAnotherBatch()); @@ -256,8 +253,9 @@ void no_cursor_on_empty_response() { @Test void no_clean_if_no_scroll_in_response() { SearchResponse searchResponse = mock(); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); request.search((x) -> searchResponse, (x) -> searchResponse); assertEquals(NO_SCROLL_ID, request.getScrollId()); @@ -286,8 +284,10 @@ void serialize_deserialize_no_needClean() { @Test @SneakyThrows void serialize_deserialize_needClean() { - lenient().when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + lenient() + .when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); lenient().when(searchResponse.getScrollId()).thenReturn(""); var stream = new BytesStreamOutput(); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java index 7ed6c900dd..cd915cf5e5 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java @@ 
-36,12 +36,7 @@ class OpenSearchAggregationResponseParserTest { /** SELECT MAX(age) as max FROM accounts. */ @Test void no_bucket_one_metric_should_pass() { - String response = - "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; + String response = "{\n \"max#max\": {\n \"value\": 40\n }\n}"; NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); assertThat(parse(parser, response), contains(entry("max", 40d))); } @@ -145,12 +140,7 @@ void two_bucket_one_metric_should_pass() { @Test void unsupported_aggregation_should_fail() { - String response = - "{\n" - + " \"date_histogram#date_histogram\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; + String response = "{\n \"date_histogram#date_histogram\": {\n \"value\": 40\n }\n}"; NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); RuntimeException exception = assertThrows(RuntimeException.class, () -> parse(parser, response)); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java index b26847b095..6f4605bc2f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import static java.util.Collections.emptyList; @@ -49,31 +48,25 @@ @ExtendWith(MockitoExtension.class) class OpenSearchResponseTest { - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit1; + @Mock private SearchHit searchHit1; - @Mock - private SearchHit searchHit2; + @Mock 
private SearchHit searchHit2; - @Mock - private Aggregations aggregations; + @Mock private Aggregations aggregations; private List includes = List.of(); - @Mock - private OpenSearchAggregationResponseParser parser; + @Mock private OpenSearchAggregationResponseParser parser; - private ExprTupleValue exprTupleValue1 = ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", - new ExprIntegerValue(1))); + private ExprTupleValue exprTupleValue1 = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); - private ExprTupleValue exprTupleValue2 = ExprTupleValue.fromExprValueMap(ImmutableMap.of("id2", - new ExprIntegerValue(2))); + private ExprTupleValue exprTupleValue2 = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id2", new ExprIntegerValue(2))); @Test void isEmpty() { @@ -119,7 +112,8 @@ void iterator() { when(searchHit1.getInnerHits()).thenReturn(null); when(searchHit2.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())) - .thenReturn(exprTupleValue1).thenReturn(exprTupleValue2); + .thenReturn(exprTupleValue1) + .thenReturn(exprTupleValue2); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, List.of("id1"))) { @@ -137,9 +131,8 @@ void iterator() { @Test void iterator_metafields() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -160,15 +153,16 @@ void iterator_metafields() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1), - "_index", new ExprStringValue("testIndex"), - "_id", new ExprStringValue("testId"), - "_routing", new ExprStringValue(shardTarget.toString()), - "_sort", new ExprLongValue(123456L), - 
"_score", new ExprFloatValue(3.75F), - "_maxscore", new ExprFloatValue(3.75F) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "id1", new ExprIntegerValue(1), + "_index", new ExprStringValue("testIndex"), + "_id", new ExprStringValue("testId"), + "_routing", new ExprStringValue(shardTarget.toString()), + "_sort", new ExprLongValue(123456L), + "_score", new ExprFloatValue(3.75F), + "_maxscore", new ExprFloatValue(3.75F))); List includes = List.of("id1", "_index", "_id", "_routing", "_sort", "_score", "_maxscore"); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { @@ -184,9 +178,8 @@ void iterator_metafields() { @Test void iterator_metafields_withoutIncludes() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -200,9 +193,8 @@ void iterator_metafields_withoutIncludes() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); List includes = List.of("id1"); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { if (i == 0) { @@ -217,9 +209,8 @@ void iterator_metafields_withoutIncludes() { @Test void iterator_metafields_scoreNaN() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -237,12 +228,13 @@ void 
iterator_metafields_scoreNaN() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); List includes = List.of("id1", "_index", "_id", "_sort", "_score", "_maxscore"); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1), - "_index", new ExprStringValue("testIndex"), - "_id", new ExprStringValue("testId"), - "_sort", new ExprLongValue(123456L) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "id1", new ExprIntegerValue(1), + "_index", new ExprStringValue("testIndex"), + "_id", new ExprStringValue("testId"), + "_sort", new ExprLongValue(123456L))); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { if (i == 0) { @@ -262,13 +254,14 @@ void iterator_with_inner_hits() { new SearchHit[] {searchHit1}, new TotalHits(2L, TotalHits.Relation.EQUAL_TO), 1.0F)); - when(searchHit1.getInnerHits()).thenReturn( - Map.of( - "innerHit", - new SearchHits( - new SearchHit[] {searchHit1}, - new TotalHits(2L, TotalHits.Relation.EQUAL_TO), - 1.0F))); + when(searchHit1.getInnerHits()) + .thenReturn( + Map.of( + "innerHit", + new SearchHits( + new SearchHit[] {searchHit1}, + new TotalHits(2L, TotalHits.Relation.EQUAL_TO), + 1.0F))); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue1); @@ -321,18 +314,17 @@ void aggregation_iterator() { @Test void highlight_iterator() { SearchHit searchHit = new SearchHit(1); - searchHit.sourceRef( - new BytesArray("{\"name\":\"John\"}")); - Map highlightMap = Map.of("highlights", - new HighlightField("Title", new Text[] {new Text("field")})); - searchHit.highlightFields(Map.of("highlights", new HighlightField("Title", - new Text[] {new Text("field")}))); + searchHit.sourceRef(new BytesArray("{\"name\":\"John\"}")); + Map highlightMap = + Map.of("highlights", new HighlightField("Title", new Text[] {new Text("field")})); + searchHit.highlightFields( + 
Map.of("highlights", new HighlightField("Title", new Text[] {new Text("field")}))); ExprValue resultTuple = ExprValueUtils.tupleValue(searchHit.getSourceAsMap()); when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[]{searchHit1}, + new SearchHit[] {searchHit1}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); @@ -340,11 +332,12 @@ void highlight_iterator() { when(factory.construct(any(), anyBoolean())).thenReturn(resultTuple); for (ExprValue resultHit : new OpenSearchResponse(searchResponse, factory, includes)) { - var expected = ExprValueUtils.collectionValue( - Arrays.stream(searchHit.getHighlightFields().get("highlights").getFragments()) - .map(t -> (t.toString())).collect(Collectors.toList())); - var result = resultHit.tupleValue().get( - "_highlight").tupleValue().get("highlights"); + var expected = + ExprValueUtils.collectionValue( + Arrays.stream(searchHit.getHighlightFields().get("highlights").getFragments()) + .map(t -> (t.toString())) + .collect(Collectors.toList())); + var result = resultHit.tupleValue().get("_highlight").tupleValue().get("highlights"); assertTrue(expected.equals(result)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java index 835798f162..ff2c311753 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.setting; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -33,14 +32,12 @@ @ExtendWith(MockitoExtension.class) class OpenSearchSettingsTest { - @Mock - private ClusterSettings clusterSettings; + @Mock private ClusterSettings clusterSettings; @Test void getSettingValue() { 
when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue sizeValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -50,11 +47,14 @@ void getSettingValue() { @Test void getSettingValueWithPresetValuesInYml() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings - .get((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING)) + when(clusterSettings.get( + (Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING)) .thenReturn(new ByteSizeValue(20)); - when(clusterSettings.get(not(or(eq(ClusterName.CLUSTER_NAME_SETTING), - eq((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING))))) + when(clusterSettings.get( + not( + or( + eq(ClusterName.CLUSTER_NAME_SETTING), + eq((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING))))) .thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue sizeValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -78,8 +78,7 @@ void pluginNonDynamicSettings() { @Test void getSettings() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); assertFalse(settings.getSettings().isEmpty()); } @@ -87,12 +86,10 @@ void getSettings() { @Test void update() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - 
when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue oldValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); - OpenSearchSettings.Updater updater = - settings.new Updater(Settings.Key.QUERY_MEMORY_LIMIT); + OpenSearchSettings.Updater updater = settings.new Updater(Settings.Key.QUERY_MEMORY_LIMIT); updater.accept(new ByteSizeValue(0L)); ByteSizeValue newValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -103,8 +100,7 @@ void update() { @Test void settingsFallback() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); assertEquals( settings.getSettingValue(Settings.Key.SQL_ENABLED), @@ -156,17 +152,17 @@ public void updateLegacySettingsFallback() { assertEquals(OpenSearchSettings.SQL_ENABLED_SETTING.get(settings), false); assertEquals(OpenSearchSettings.SQL_SLOWLOG_SETTING.get(settings), 10); - assertEquals(OpenSearchSettings.SQL_CURSOR_KEEP_ALIVE_SETTING.get(settings), - timeValueMinutes(1)); + assertEquals( + OpenSearchSettings.SQL_CURSOR_KEEP_ALIVE_SETTING.get(settings), timeValueMinutes(1)); assertEquals(OpenSearchSettings.PPL_ENABLED_SETTING.get(settings), true); - assertEquals(OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING.get(settings), + assertEquals( + OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING.get(settings), new ByteSizeValue((int) (JvmInfo.jvmInfo().getMem().getHeapMax().getBytes() * 0.2))); assertEquals(OpenSearchSettings.QUERY_SIZE_LIMIT_SETTING.get(settings), 100); 
assertEquals(OpenSearchSettings.METRICS_ROLLING_WINDOW_SETTING.get(settings), 2000L); assertEquals(OpenSearchSettings.METRICS_ROLLING_INTERVAL_SETTING.get(settings), 100L); } - @Test void legacySettingsShouldBeDeprecatedBeforeRemove() { assertEquals(15, legacySettings().size()); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java index 39af59b6cd..3ddb07d86a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java @@ -63,20 +63,16 @@ class OpenSearchIndexTest { public static final int QUERY_SIZE_LIMIT = 200; public static final TimeValue SCROLL_TIMEOUT = new TimeValue(1); - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private OpenSearchExprValueFactory exprValueFactory; + @Mock private OpenSearchExprValueFactory exprValueFactory; - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private IndexMapping mapping; + @Mock private IndexMapping mapping; private OpenSearchIndex index; @@ -94,16 +90,18 @@ void isExist() { @Test void createIndex() { - Map mappings = Map.of( - "properties", + Map mappings = Map.of( - "name", "text", - "age", "integer")); + "properties", + Map.of( + "name", "text", + "age", "integer")); doNothing().when(client).createIndex("test", mappings); Map schema = new HashMap<>(); - schema.put("name", OpenSearchTextType.of(Map.of("keyword", - OpenSearchDataType.of(MappingType.Keyword)))); + schema.put( + "name", + OpenSearchTextType.of(Map.of("keyword", 
OpenSearchDataType.of(MappingType.Keyword)))); schema.put("age", INTEGER); index.create(schema); verify(client).createIndex(any(), any()); @@ -111,24 +109,27 @@ void createIndex() { @Test void getFieldTypes() { - when(mapping.getFieldMappings()).thenReturn( - ImmutableMap.builder() - .put("name", MappingType.Keyword) - .put("address", MappingType.Text) - .put("age", MappingType.Integer) - .put("account_number", MappingType.Long) - .put("balance1", MappingType.Float) - .put("balance2", MappingType.Double) - .put("gender", MappingType.Boolean) - .put("family", MappingType.Nested) - .put("employer", MappingType.Object) - .put("birthday", MappingType.Date) - .put("id1", MappingType.Byte) - .put("id2", MappingType.Short) - .put("blob", MappingType.Binary) - .build().entrySet().stream().collect(Collectors.toMap( - Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue()) - ))); + when(mapping.getFieldMappings()) + .thenReturn( + ImmutableMap.builder() + .put("name", MappingType.Keyword) + .put("address", MappingType.Text) + .put("age", MappingType.Integer) + .put("account_number", MappingType.Long) + .put("balance1", MappingType.Float) + .put("balance2", MappingType.Double) + .put("gender", MappingType.Boolean) + .put("family", MappingType.Nested) + .put("employer", MappingType.Object) + .put("birthday", MappingType.Date) + .put("id1", MappingType.Byte) + .put("id2", MappingType.Short) + .put("blob", MappingType.Binary) + .build() + .entrySet() + .stream() + .collect( + Collectors.toMap(Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue())))); when(client.getIndexMappings("test")).thenReturn(ImmutableMap.of("test", mapping)); // Run more than once to confirm caching logic is covered and can work @@ -150,35 +151,30 @@ void getFieldTypes() { hasEntry("birthday", ExprCoreType.TIMESTAMP), hasEntry("id1", ExprCoreType.BYTE), hasEntry("id2", ExprCoreType.SHORT), - hasEntry("blob", (ExprType) OpenSearchDataType.of(MappingType.Binary)) - )); + hasEntry("blob", (ExprType) 
OpenSearchDataType.of(MappingType.Binary)))); } } @Test void checkCacheUsedForFieldMappings() { - when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(MappingType.Keyword))); - when(client.getIndexMappings("test")).thenReturn( - ImmutableMap.of("test", mapping)); + when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(MappingType.Keyword))); + when(client.getIndexMappings("test")).thenReturn(ImmutableMap.of("test", mapping)); OpenSearchIndex index = new OpenSearchIndex(client, settings, "test"); - assertThat(index.getFieldTypes(), allOf( - aMapWithSize(1), - hasEntry("name", STRING))); - assertThat(index.getFieldOpenSearchTypes(), allOf( - aMapWithSize(1), - hasEntry("name", OpenSearchDataType.of(STRING)))); + assertThat(index.getFieldTypes(), allOf(aMapWithSize(1), hasEntry("name", STRING))); + assertThat( + index.getFieldOpenSearchTypes(), + allOf(aMapWithSize(1), hasEntry("name", OpenSearchDataType.of(STRING)))); - lenient().when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(MappingType.Integer))); + lenient() + .when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(MappingType.Integer))); - assertThat(index.getFieldTypes(), allOf( - aMapWithSize(1), - hasEntry("name", STRING))); - assertThat(index.getFieldOpenSearchTypes(), allOf( - aMapWithSize(1), - hasEntry("name", OpenSearchDataType.of(STRING)))); + assertThat(index.getFieldTypes(), allOf(aMapWithSize(1), hasEntry("name", STRING))); + assertThat( + index.getFieldOpenSearchTypes(), + allOf(aMapWithSize(1), hasEntry("name", OpenSearchDataType.of(STRING)))); } @Test @@ -193,8 +189,7 @@ void getReservedFieldTypes() { hasEntry("_routing", ExprCoreType.STRING), hasEntry("_sort", ExprCoreType.LONG), hasEntry("_score", ExprCoreType.FLOAT), - hasEntry("_maxscore", ExprCoreType.FLOAT) - )); + hasEntry("_maxscore", ExprCoreType.FLOAT))); } @Test @@ -204,8 +199,9 @@ void 
implementRelationOperatorOnly() { LogicalPlan plan = index.createScanBuilder(); Integer maxResultWindow = index.getMaxResultWindow(); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE_LIMIT, exprValueFactory); - assertEquals(new OpenSearchIndexScan(client, - 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), + assertEquals( + new OpenSearchIndexScan( + client, 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), index.implement(index.optimize(plan))); } @@ -216,8 +212,10 @@ void implementRelationOperatorWithOptimization() { LogicalPlan plan = index.createScanBuilder(); Integer maxResultWindow = index.getMaxResultWindow(); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE_LIMIT, exprValueFactory); - assertEquals(new OpenSearchIndexScan(client, 200, - requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), index.implement(plan)); + assertEquals( + new OpenSearchIndexScan( + client, 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), + index.implement(plan)); } @Test @@ -239,12 +237,7 @@ void implementOtherLogicalOperators() { LogicalPlanDSL.dedupe( sort( eval( - remove( - rename( - index.createScanBuilder(), - mappings), - exclude), - newEvalField), + remove(rename(index.createScanBuilder(), mappings), exclude), newEvalField), sortField), dedupeField), include); @@ -258,9 +251,11 @@ void implementOtherLogicalOperators() { PhysicalPlanDSL.eval( PhysicalPlanDSL.remove( PhysicalPlanDSL.rename( - new OpenSearchIndexScan(client, - QUERY_SIZE_LIMIT, requestBuilder.build(INDEX_NAME, maxResultWindow, - SCROLL_TIMEOUT)), + new OpenSearchIndexScan( + client, + QUERY_SIZE_LIMIT, + requestBuilder.build( + INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), mappings), exclude), newEvalField), diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java index 1089e7e252..38f2ae495e 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import static org.junit.jupiter.api.Assertions.assertAll; @@ -25,31 +24,23 @@ @ExtendWith(MockitoExtension.class) class OpenSearchStorageEngineTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private Settings settings; + @Mock private Settings settings; @Test public void getTable() { OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); - Table table = engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), - "test"); - assertAll( - () -> assertNotNull(table), - () -> assertTrue(table instanceof OpenSearchIndex) - ); + Table table = + engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), "test"); + assertAll(() -> assertNotNull(table), () -> assertTrue(table instanceof OpenSearchIndex)); } @Test public void getSystemTable() { OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); - Table table = engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), - TABLE_INFO); - assertAll( - () -> assertNotNull(table), - () -> assertTrue(table instanceof OpenSearchSystemIndex) - ); + Table table = + engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), TABLE_INFO); + assertAll(() -> assertNotNull(table), () -> assertTrue(table instanceof OpenSearchSystemIndex)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java index 5a510fefec..229d62abdf 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java @@ -26,10 +26,8 @@ @ExtendWith(MockitoExtension.class) class OpenSearchIndexScanAggregationBuilderTest { - @Mock - OpenSearchRequestBuilder requestBuilder; - @Mock - LogicalAggregation logicalAggregation; + @Mock OpenSearchRequestBuilder requestBuilder; + @Mock LogicalAggregation logicalAggregation; OpenSearchIndexScanAggregationBuilder builder; @BeforeEach @@ -71,5 +69,4 @@ void pushDownPageSize() { void pushDownNested() { assertFalse(builder.pushDownNested(mock(LogicalNested.class))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java index e045bae3e3..6749f87c5b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -91,16 +90,13 @@ @ExtendWith(MockitoExtension.class) class OpenSearchIndexScanOptimizationTest { - @Mock - private Table table; + @Mock private Table table; - @Mock - private OpenSearchIndexScan indexScan; + @Mock private OpenSearchIndexScan indexScan; private OpenSearchIndexScanBuilder indexScanBuilder; - @Mock - private OpenSearchRequestBuilder requestBuilder; + @Mock private OpenSearchRequestBuilder requestBuilder; private 
Runnable[] verifyPushDownCalls = {}; @@ -114,72 +110,54 @@ void setUp() { void test_project_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withProjectPushedDown(DSL.ref("intV", INTEGER))), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), - project( - relation("schema", table), - DSL.named("i", DSL.ref("intV", INTEGER))) - ); + indexScanBuilder(withProjectPushedDown(DSL.ref("intV", INTEGER))), + DSL.named("i", DSL.ref("intV", INTEGER))), + project(relation("schema", table), DSL.named("i", DSL.ref("intV", INTEGER)))); } - /** - * SELECT intV as i FROM schema WHERE intV = 1. - */ + /** SELECT intV as i FROM schema WHERE intV = 1. */ @Test void test_filter_push_down() { assertEqualsAfterOptimization( project( indexScanBuilder( - //withProjectPushedDown(DSL.ref("intV", INTEGER)), - withFilterPushedDown(QueryBuilders.termQuery("intV", 1)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), + // withProjectPushedDown(DSL.ref("intV", INTEGER)), + withFilterPushedDown(QueryBuilders.termQuery("intV", 1))), + DSL.named("i", DSL.ref("intV", INTEGER))), project( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + DSL.named("i", DSL.ref("intV", INTEGER)))); } - /** - * SELECT intV as i FROM schema WHERE query_string(["intV^1.5", "QUERY", boost=12.5). - */ + /** SELECT intV as i FROM schema WHERE query_string(["intV^1.5", "QUERY", boost=12.5). 
*/ @Test void test_filter_on_opensearchfunction_with_trackedscores_push_down() { LogicalPlan expectedPlan = project( indexScanBuilder( withFilterPushedDown( - QueryBuilders.queryStringQuery("QUERY") - .field("intV", 1.5F) - .boost(12.5F) - ), - withTrackedScoresPushedDown(true) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression queryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); + QueryBuilders.queryStringQuery("QUERY").field("intV", 1.5F).boost(12.5F)), + withTrackedScoresPushedDown(true)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression queryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) queryString).setScoreTracked(true); - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - queryString - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), queryString), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } @@ -197,35 +175,36 @@ void test_filter_on_multiple_opensearchfunctions_with_trackedscores_push_down() .should( QueryBuilders.queryStringQuery("QUERY") .field("intV", 1.5F) - .boost(12.5F) - ) - ), - withTrackedScoresPushedDown(true) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression firstQueryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", 
"QUERY"), - DSL.namedArgument("boost", "12.5")); + .boost(12.5F))), + withTrackedScoresPushedDown(true)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression firstQueryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) firstQueryString).setScoreTracked(false); - FunctionExpression secondQueryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); + FunctionExpression secondQueryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) secondQueryString).setScoreTracked(true); - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - DSL.or(firstQueryString, secondQueryString) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), DSL.or(firstQueryString, secondQueryString)), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } @@ -235,34 +214,28 @@ void test_filter_on_opensearchfunction_without_trackedscores_push_down() { project( indexScanBuilder( withFilterPushedDown( - QueryBuilders.queryStringQuery("QUERY") - .field("intV", 1.5F) - .boost(12.5F) - ), - withTrackedScoresPushedDown(false) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression queryString = DSL.query_string( - 
DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); - - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - queryString - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + QueryBuilders.queryStringQuery("QUERY").field("intV", 1.5F).boost(12.5F)), + withTrackedScoresPushedDown(false)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression queryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); + + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), queryString), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } - /** - * SELECT avg(intV) FROM schema GROUP BY string_value. - */ + /** SELECT avg(intV) FROM schema GROUP BY string_value. 
*/ @Test void test_aggregation_push_down() { assertEqualsAfterOptimization( @@ -272,20 +245,17 @@ void test_aggregation_push_down() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("longV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "longV", LONG)))), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "longV", LONG)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } /* @@ -319,125 +289,79 @@ void aggregation_cant_merge_indexScan_with_project() { } */ - /** - * Sort - Relation --> IndexScan. - */ + /** Sort - Relation --> IndexScan. */ @Test void test_sort_push_down() { assertEqualsAfterOptimization( indexScanBuilder( withSortPushedDown( - SortBuilders.fieldSort("intV").order(SortOrder.ASC).missing("_first")) - ), - sort( - relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER)) - ) - ); + SortBuilders.fieldSort("intV").order(SortOrder.ASC).missing("_first"))), + sort(relation("schema", table), Pair.of(SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER)))); } @Test void test_page_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withPageSizePushDown(5)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), - paginate(project( - relation("schema", table), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), 5 - )); + indexScanBuilder(withPageSizePushDown(5)), DSL.named("intV", DSL.ref("intV", INTEGER))), + paginate( + project(relation("schema", table), DSL.named("intV", DSL.ref("intV", INTEGER))), 5)); } @Test void test_score_sort_push_down() { assertEqualsAfterOptimization( - indexScanBuilder( - 
withSortPushedDown( - SortBuilders.scoreSort().order(SortOrder.ASC) - ) - ), + indexScanBuilder(withSortPushedDown(SortBuilders.scoreSort().order(SortOrder.ASC))), sort( relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("_score", INTEGER)) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("_score", INTEGER)))); } @Test void test_limit_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withLimitPushedDown(1, 1)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + indexScanBuilder(withLimitPushedDown(1, 1)), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( - limit( - relation("schema", table), - 1, 1), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + limit(relation("schema", table), 1, 1), DSL.named("intV", DSL.ref("intV", INTEGER)))); } @Test void test_highlight_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withHighlightPushedDown("*", Collections.emptyMap())), - DSL.named("highlight(*)", - new HighlightExpression(DSL.literal("*"))) - ), + indexScanBuilder(withHighlightPushedDown("*", Collections.emptyMap())), + DSL.named("highlight(*)", new HighlightExpression(DSL.literal("*")))), project( - highlight( - relation("schema", table), - DSL.literal("*"), Collections.emptyMap()), - DSL.named("highlight(*)", - new HighlightExpression(DSL.literal("*"))) - ) - ); + highlight(relation("schema", table), DSL.literal("*"), Collections.emptyMap()), + DSL.named("highlight(*)", new HighlightExpression(DSL.literal("*"))))); } @Test void test_nested_push_down() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new 
NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); assertEqualsAfterOptimization( project( - nested( - indexScanBuilder( - withNestedPushedDown(nested.getFields())), args, projectList), - DSL.named("message.info", - DSL.nested(DSL.ref("message.info", STRING))) - ), - project( - nested( - relation("schema", table), args, projectList), - DSL.named("message.info", - DSL.nested(DSL.ref("message.info", STRING))) - ) - ); + nested(indexScanBuilder(withNestedPushedDown(nested.getFields())), args, projectList), + DSL.named("message.info", DSL.nested(DSL.ref("message.info", STRING)))), + project( + nested(relation("schema", table), args, projectList), + DSL.named("message.info", DSL.nested(DSL.ref("message.info", STRING))))); } - /** - * SELECT avg(intV) FROM schema WHERE intV = 1 GROUP BY string_value. - */ + /** SELECT avg(intV) FROM schema WHERE intV = 1 GROUP BY string_value. */ @Test void test_aggregation_filter_push_down() { assertEqualsAfterOptimization( @@ -448,50 +372,37 @@ void test_aggregation_filter_push_down() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("longV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "longV", LONG)))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } 
- /** - * Sort - Filter - Relation --> IndexScan. - */ + /** Sort - Filter - Relation --> IndexScan. */ @Test void test_sort_filter_push_down() { assertEqualsAfterOptimization( indexScanBuilder( withFilterPushedDown(QueryBuilders.termQuery("intV", 1)), withSortPushedDown( - SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first")) - ), + SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first"))), sort( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)))); } - /** - * SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY stringV. - */ + /** SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY stringV. */ @Test void test_sort_aggregation_push_down() { assertEqualsAfterOptimization( @@ -502,22 +413,19 @@ void test_sort_aggregation_push_down() { .aggregateBy("intV") .groupBy("stringV") .sortBy(SortOption.DEFAULT_DESC) - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "stringV", STRING)))), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_DESC, DSL.ref("stringV", STRING)) - ), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + Pair.of(SortOption.DEFAULT_DESC, DSL.ref("stringV", STRING))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } @Test @@ -529,21 +437,17 @@ void test_limit_sort_filter_push_down() { withSortPushedDown( 
SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first")), withLimitPushedDown(1, 1)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( limit( sort( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ), 1, 1 - ), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG))), + 1, + 1), + DSL.named("intV", DSL.ref("intV", INTEGER)))); } /* @@ -557,23 +461,16 @@ void only_one_project_should_be_push() { project( project( indexScanBuilder( - withProjectPushedDown( - DSL.ref("intV", INTEGER), - DSL.ref("stringV", STRING))), + withProjectPushedDown(DSL.ref("intV", INTEGER), DSL.ref("stringV", STRING))), DSL.named("i", DSL.ref("intV", INTEGER)), - DSL.named("s", DSL.ref("stringV", STRING)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), + DSL.named("s", DSL.ref("stringV", STRING))), + DSL.named("i", DSL.ref("intV", INTEGER))), project( project( relation("schema", table), DSL.named("i", DSL.ref("intV", INTEGER)), - DSL.named("s", DSL.ref("stringV", STRING)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ) - ); + DSL.named("s", DSL.ref("stringV", STRING))), + DSL.named("i", DSL.ref("intV", INTEGER)))); } @Test @@ -586,21 +483,14 @@ void test_nested_sort_filter_push_down() { SortBuilders.fieldSort("message.info") .order(SortOrder.ASC) .setNestedSort(new NestedSortBuilder("message")))), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( - sort( - filter( - relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of( - SortOption.DEFAULT_ASC, DSL.nested(DSL.ref("message.info", STRING)) - ) - ), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + sort( + 
filter( + relation("schema", table), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.nested(DSL.ref("message.info", STRING)))), + DSL.named("intV", DSL.ref("intV", INTEGER)))); } @Test @@ -610,54 +500,30 @@ void test_function_expression_sort_returns_optimized_logical_sort() { sort( indexScanBuilder(), Pair.of( - SortOption.DEFAULT_ASC, - DSL.match(DSL.namedArgument("field", literal("message"))) - ) - ), + SortOption.DEFAULT_ASC, DSL.match(DSL.namedArgument("field", literal("message"))))), sort( relation("schema", table), Pair.of( SortOption.DEFAULT_ASC, - DSL.match(DSL.namedArgument("field", literal("message")) - ) - ) - ) - ); + DSL.match(DSL.namedArgument("field", literal("message")))))); } @Test void test_non_field_sort_returns_optimized_logical_sort() { // Invalid use case coverage OpenSearchIndexScanBuilder::sortByFieldsOnly returns false assertEqualsAfterOptimization( - sort( - indexScanBuilder(), - Pair.of( - SortOption.DEFAULT_ASC, - DSL.literal("field") - ) - ), - sort( - relation("schema", table), - Pair.of( - SortOption.DEFAULT_ASC, - DSL.literal("field") - ) - ) - ); + sort(indexScanBuilder(), Pair.of(SortOption.DEFAULT_ASC, DSL.literal("field"))), + sort(relation("schema", table), Pair.of(SortOption.DEFAULT_ASC, DSL.literal("field")))); } @Test void sort_with_expression_cannot_merge_with_relation() { assertEqualsAfterOptimization( sort( - indexScanBuilder(), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ), + indexScanBuilder(), Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER)))), sort( relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))))); } @Test @@ -669,20 +535,17 @@ void sort_with_expression_cannot_merge_with_aggregation() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("stringV") - .resultTypes(Map.of( - "AVG(intV)", 
DOUBLE, - "stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER)))), sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))))); } @Test @@ -690,30 +553,21 @@ void aggregation_cant_merge_index_scan_with_limit() { assertEqualsAfterOptimization( project( aggregation( - indexScanBuilder( - withLimitPushedDown(10, 0)), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), - ImmutableList.of(DSL.named("longV", - DSL.abs(DSL.ref("longV", LONG))))), + indexScanBuilder(withLimitPushedDown(10, 0)), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("longV", DSL.abs(DSL.ref("longV", LONG))))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( - limit( - relation("schema", table), - 10, 0), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), - ImmutableList.of(DSL.named("longV", - DSL.abs(DSL.ref("longV", LONG))))), + limit(relation("schema", table), 10, 0), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("longV", DSL.abs(DSL.ref("longV", LONG))))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } /** - * Can't Optimize the following query. - * SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY avg(intV). + * Can't Optimize the following query. SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY + * avg(intV). 
*/ @Test void sort_refer_to_aggregator_should_not_merge_with_indexAgg() { @@ -725,52 +579,39 @@ void sort_refer_to_aggregator_should_not_merge_with_indexAgg() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("stringV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER)) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER)) - ), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } @Test void project_literal_should_not_be_pushed_down() { assertEqualsAfterOptimization( - project( - indexScanBuilder(), - DSL.named("i", DSL.literal("str")) - ), - optimize( - project( - relation("schema", table), - DSL.named("i", DSL.literal("str")) - ) - ) - ); + project(indexScanBuilder(), DSL.named("i", DSL.literal("str"))), + optimize(project(relation("schema", table), DSL.named("i", DSL.literal("str"))))); } private OpenSearchIndexScanBuilder indexScanBuilder(Runnable... verifyPushDownCalls) { this.verifyPushDownCalls = verifyPushDownCalls; - return new OpenSearchIndexScanBuilder(new OpenSearchIndexScanQueryBuilder(requestBuilder), - requestBuilder -> indexScan); + return new OpenSearchIndexScanBuilder( + new OpenSearchIndexScanQueryBuilder(requestBuilder), requestBuilder -> indexScan); } private OpenSearchIndexScanBuilder indexScanAggBuilder(Runnable... 
verifyPushDownCalls) { this.verifyPushDownCalls = verifyPushDownCalls; - var aggregationBuilder = new OpenSearchIndexScanAggregationBuilder( - requestBuilder, mock(LogicalAggregation.class)); + var aggregationBuilder = + new OpenSearchIndexScanAggregationBuilder(requestBuilder, mock(LogicalAggregation.class)); return new OpenSearchIndexScanBuilder(aggregationBuilder, builder -> indexScan); } @@ -797,29 +638,32 @@ private Runnable withAggregationPushedDown( AggregationAssertHelper.AggregationAssertHelperBuilder aggregation) { // Assume single term bucket and AVG metric in all tests in this suite - CompositeAggregationBuilder aggBuilder = AggregationBuilders.composite( - "composite_buckets", - Collections.singletonList( - new TermsValuesSourceBuilder(aggregation.groupBy) - .field(aggregation.groupBy) - .order(aggregation.sortBy.getSortOrder() == ASC ? "asc" : "desc") - .missingOrder(aggregation.sortBy.getNullOrder() == NULL_FIRST ? "first" : "last") - .missingBucket(true))) - .subAggregation( - AggregationBuilders.avg(aggregation.aggregateName) - .field(aggregation.aggregateBy)) - .size(AggregationQueryBuilder.AGGREGATION_BUCKET_SIZE); + CompositeAggregationBuilder aggBuilder = + AggregationBuilders.composite( + "composite_buckets", + Collections.singletonList( + new TermsValuesSourceBuilder(aggregation.groupBy) + .field(aggregation.groupBy) + .order(aggregation.sortBy.getSortOrder() == ASC ? "asc" : "desc") + .missingOrder( + aggregation.sortBy.getNullOrder() == NULL_FIRST ? 
"first" : "last") + .missingBucket(true))) + .subAggregation( + AggregationBuilders.avg(aggregation.aggregateName).field(aggregation.aggregateBy)) + .size(AggregationQueryBuilder.AGGREGATION_BUCKET_SIZE); List aggBuilders = Collections.singletonList(aggBuilder); OpenSearchAggregationResponseParser responseParser = - new CompositeAggregationParser( - new SingleValueParser(aggregation.aggregateName)); + new CompositeAggregationParser(new SingleValueParser(aggregation.aggregateName)); return () -> { verify(requestBuilder, times(1)).pushDownAggregation(Pair.of(aggBuilders, responseParser)); - verify(requestBuilder, times(1)).pushTypeMapping(aggregation.resultTypes - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - e -> OpenSearchDataType.of(e.getValue())))); + verify(requestBuilder, times(1)) + .pushTypeMapping( + aggregation.resultTypes.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue())))); }; } @@ -832,8 +676,8 @@ private Runnable withLimitPushedDown(int size, int offset) { } private Runnable withProjectPushedDown(ReferenceExpression... 
references) { - return () -> verify(requestBuilder, times(1)).pushDownProjects( - new HashSet<>(Arrays.asList(references))); + return () -> + verify(requestBuilder, times(1)).pushDownProjects(new HashSet<>(Arrays.asList(references))); } private Runnable withHighlightPushedDown(String field, Map arguments) { @@ -875,16 +719,18 @@ private static class AggregationAssertHelper { } private LogicalPlan optimize(LogicalPlan plan) { - LogicalPlanOptimizer optimizer = new LogicalPlanOptimizer(List.of( - new CreateTableScanBuilder(), - new PushDownPageSize(), - PUSH_DOWN_FILTER, - PUSH_DOWN_AGGREGATION, - PUSH_DOWN_SORT, - PUSH_DOWN_LIMIT, - PUSH_DOWN_HIGHLIGHT, - PUSH_DOWN_NESTED, - PUSH_DOWN_PROJECT)); + LogicalPlanOptimizer optimizer = + new LogicalPlanOptimizer( + List.of( + new CreateTableScanBuilder(), + new PushDownPageSize(), + PUSH_DOWN_FILTER, + PUSH_DOWN_AGGREGATION, + PUSH_DOWN_SORT, + PUSH_DOWN_LIMIT, + PUSH_DOWN_HIGHLIGHT, + PUSH_DOWN_NESTED, + PUSH_DOWN_PROJECT)); return optimizer.optimize(plan); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java index 67f0869d6e..2085519b12 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java @@ -44,34 +44,37 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) public class OpenSearchIndexScanPaginationTest { - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); public static final int MAX_RESULT_WINDOW = 3; public static final TimeValue SCROLL_TIMEOUT = TimeValue.timeValueMinutes(4); - @Mock - 
private Settings settings; + @Mock private Settings settings; @BeforeEach void setup() { lenient().when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(QUERY_SIZE); - lenient().when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) - .thenReturn(TimeValue.timeValueMinutes(1)); + lenient() + .when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); } - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - private final OpenSearchExprValueFactory exprValueFactory - = new OpenSearchExprValueFactory(Map.of( - "name", OpenSearchDataType.of(STRING), - "department", OpenSearchDataType.of(STRING))); + private final OpenSearchExprValueFactory exprValueFactory = + new OpenSearchExprValueFactory( + Map.of( + "name", OpenSearchDataType.of(STRING), + "department", OpenSearchDataType.of(STRING))); @Test void query_empty_result() { mockResponse(client); var builder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (var indexScan = new OpenSearchIndexScan(client, MAX_RESULT_WINDOW, - builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { + try (var indexScan = + new OpenSearchIndexScan( + client, + MAX_RESULT_WINDOW, + builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { indexScan.open(); assertFalse(indexScan.hasNext()); } @@ -80,8 +83,11 @@ void query_empty_result() { @Test void explain_not_implemented() { - assertThrows(Throwable.class, () -> mock(OpenSearchIndexScan.class, - withSettings().defaultAnswer(CALLS_REAL_METHODS)).explain()); + assertThrows( + Throwable.class, + () -> + mock(OpenSearchIndexScan.class, withSettings().defaultAnswer(CALLS_REAL_METHODS)) + .explain()); } @Test @@ -92,9 +98,11 @@ void dont_serialize_if_no_cursor() { OpenSearchResponse response = mock(); when(builder.build(any(), anyInt(), any())).thenReturn(request); when(client.search(any())).thenReturn(response); - try (var indexScan - = new 
OpenSearchIndexScan(client, MAX_RESULT_WINDOW, - builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { + try (var indexScan = + new OpenSearchIndexScan( + client, + MAX_RESULT_WINDOW, + builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { indexScan.open(); when(request.hasAnotherBatch()).thenReturn(false); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java index 67749c4055..ac1e9038fb 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import static org.junit.jupiter.api.Assertions.assertAll; @@ -66,20 +65,19 @@ class OpenSearchIndexScanTest { public static final int QUERY_SIZE = 200; - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("employees"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("employees"); public static final int MAX_RESULT_WINDOW = 10000; public static final TimeValue CURSOR_KEEP_ALIVE = TimeValue.timeValueMinutes(1); - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - private final OpenSearchExprValueFactory exprValueFactory = new OpenSearchExprValueFactory( - Map.of("name", OpenSearchDataType.of(STRING), - "department", OpenSearchDataType.of(STRING))); + private final OpenSearchExprValueFactory exprValueFactory = + new OpenSearchExprValueFactory( + Map.of( + "name", OpenSearchDataType.of(STRING), "department", OpenSearchDataType.of(STRING))); @BeforeEach - void setup() { - } + void setup() {} @Test void explain() { @@ -96,8 +94,8 @@ void throws_no_cursor_exception() { var 
request = mock(OpenSearchRequest.class); when(request.hasAnotherBatch()).thenReturn(false); try (var indexScan = new OpenSearchIndexScan(client, QUERY_SIZE, request); - var byteStream = new ByteArrayOutputStream(); - var objectStream = new ObjectOutputStream(byteStream)) { + var byteStream = new ByteArrayOutputStream(); + var objectStream = new ObjectOutputStream(byteStream)) { assertThrows(NoCursorException.class, () -> objectStream.writeObject(indexScan)); } } @@ -112,8 +110,9 @@ void serialize() { var index = mock(OpenSearchIndex.class); when(engine.getClient()).thenReturn(client); when(engine.getTable(any(), any())).thenReturn(index); - var request = new OpenSearchScrollRequest( - INDEX_NAME, CURSOR_KEEP_ALIVE, searchSourceBuilder, factory, List.of()); + var request = + new OpenSearchScrollRequest( + INDEX_NAME, CURSOR_KEEP_ALIVE, searchSourceBuilder, factory, List.of()); request.setScrollId("valid-id"); // make a response, so OpenSearchResponse::isEmpty would return true and unset needClean var response = mock(SearchResponse.class); @@ -121,7 +120,7 @@ void serialize() { var hits = mock(SearchHits.class); when(response.getHits()).thenReturn(hits); when(response.getScrollId()).thenReturn("valid-id"); - when(hits.getHits()).thenReturn(new SearchHit[]{ mock() }); + when(hits.getHits()).thenReturn(new SearchHit[] {mock()}); request.search(null, (req) -> response); try (var indexScan = new OpenSearchIndexScan(client, QUERY_SIZE, request)) { @@ -145,8 +144,9 @@ void query_empty_result() { mockResponse(client); final var name = new OpenSearchRequest.IndexName("test"); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - QUERY_SIZE, requestBuilder.build(name, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, QUERY_SIZE, requestBuilder.build(name, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { 
indexScan.open(); assertFalse(indexScan.hasNext()); } @@ -155,88 +155,84 @@ void query_empty_result() { @Test void query_all_results_with_query() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), employee(2, "Smith", "HR"), employee(3, "Allen", "IT") + }); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } - static final OpenSearchRequest.IndexName EMPLOYEES_INDEX - = new OpenSearchRequest.IndexName("employees"); + static final OpenSearchRequest.IndexName EMPLOYEES_INDEX = + new OpenSearchRequest.IndexName("employees"); @Test void query_all_results_with_scroll() { - mockResponse(client, - new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, - new ExprValue[]{employee(3, "Allen", "IT")}); + mockResponse( + client, + new ExprValue[] {employee(1, "John", "IT"), employee(2, "Smith", "HR")}, + new ExprValue[] {employee(3, "Allen", "IT")}); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 10, 
requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @Test void query_some_results_with_query() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT"), - employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), + employee(3, "Allen", "IT"), + employee(4, "Bob", "HR") + }); final int limit = 3; OpenSearchRequestBuilder builder = new OpenSearchRequestBuilder(0, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - limit, builder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, limit, builder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @@ -245,55 +241,56 @@ void query_some_results_with_query() { void 
query_some_results_with_scroll() { mockTwoPageResponse(client); final var requestuilder = new OpenSearchRequestBuilder(10, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 3, requestuilder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 3, requestuilder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } static void mockTwoPageResponse(OpenSearchClient client) { - mockResponse(client, - new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, - new ExprValue[]{employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] {employee(1, "John", "IT"), employee(2, "Smith", "HR")}, + new ExprValue[] {employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); } @Test void query_results_limited_by_query_size() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT"), - employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), + employee(3, "Allen", "IT"), + employee(4, "Bob", "HR") + }); final int defaultQuerySize = 2; final var requestBuilder = new OpenSearchRequestBuilder(defaultQuerySize, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - defaultQuerySize, requestBuilder.build(INDEX_NAME, QUERY_SIZE, CURSOR_KEEP_ALIVE))) { + try 
(OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, + defaultQuerySize, + requestBuilder.build(INDEX_NAME, QUERY_SIZE, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @@ -323,7 +320,8 @@ void push_down_highlight() { .pushDown(QueryBuilders.termQuery("name", "John")) .pushDownHighlight("Title", args) .pushDownHighlight("Body", args) - .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), + .shouldQueryHighlight( + QueryBuilders.termQuery("name", "John"), new HighlightBuilder().field("Title").field("Body")); } @@ -332,14 +330,12 @@ void push_down_highlight_with_arguments() { Map args = new HashMap<>(); args.put("pre_tags", new Literal("", DataType.STRING)); args.put("post_tags", new Literal("", DataType.STRING)); - HighlightBuilder highlightBuilder = new HighlightBuilder() - .field("Title"); + HighlightBuilder highlightBuilder = new HighlightBuilder().field("Title"); highlightBuilder.fields().get(0).preTags("").postTags(""); assertThat() .pushDown(QueryBuilders.termQuery("name", "John")) .pushDownHighlight("Title", args) - .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), - highlightBuilder); + .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), highlightBuilder); } private PushDownAssertion assertThat() { @@ -352,8 +348,7 @@ private static class PushDownAssertion { private final OpenSearchResponse response; private final OpenSearchExprValueFactory factory; - public PushDownAssertion(OpenSearchClient client, - OpenSearchExprValueFactory valueFactory) { + public PushDownAssertion(OpenSearchClient client, OpenSearchExprValueFactory valueFactory) { this.client = client; 
this.requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, valueFactory); @@ -373,35 +368,39 @@ PushDownAssertion pushDownHighlight(String query, Map arguments } PushDownAssertion shouldQueryHighlight(QueryBuilder query, HighlightBuilder highlight) { - var sourceBuilder = new SearchSourceBuilder() - .from(0) - .timeout(CURSOR_KEEP_ALIVE) - .query(query) - .size(QUERY_SIZE) - .highlighter(highlight) - .sort(DOC_FIELD_NAME, ASC); + var sourceBuilder = + new SearchSourceBuilder() + .from(0) + .timeout(CURSOR_KEEP_ALIVE) + .query(query) + .size(QUERY_SIZE) + .highlighter(highlight) + .sort(DOC_FIELD_NAME, ASC); OpenSearchRequest request = new OpenSearchQueryRequest(EMPLOYEES_INDEX, sourceBuilder, factory, List.of()); when(client.search(request)).thenReturn(response); - var indexScan = new OpenSearchIndexScan(client, - QUERY_SIZE, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); + var indexScan = + new OpenSearchIndexScan( + client, QUERY_SIZE, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); indexScan.open(); return this; } PushDownAssertion shouldQuery(QueryBuilder expected) { - var builder = new SearchSourceBuilder() - .from(0) - .query(expected) - .size(QUERY_SIZE) - .timeout(CURSOR_KEEP_ALIVE) - .sort(DOC_FIELD_NAME, ASC); + var builder = + new SearchSourceBuilder() + .from(0) + .query(expected) + .size(QUERY_SIZE) + .timeout(CURSOR_KEEP_ALIVE) + .sort(DOC_FIELD_NAME, ASC); OpenSearchRequest request = new OpenSearchQueryRequest(EMPLOYEES_INDEX, builder, factory, List.of()); when(client.search(request)).thenReturn(response); - var indexScan = new OpenSearchIndexScan(client, - 10000, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); + var indexScan = + new OpenSearchIndexScan( + client, 10000, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); indexScan.open(); return this; } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java index 0b0568a6b7..5f233d7f45 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java @@ -1,6 +1,5 @@ package org.opensearch.sql.opensearch.storage.scan; - import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.mockito.Mockito.mock; @@ -21,12 +20,13 @@ class PushDownQueryBuilderTest { @Test void default_implementations() { - var sample = new PushDownQueryBuilder() { - @Override - public OpenSearchRequestBuilder build() { - return null; - } - }; + var sample = + new PushDownQueryBuilder() { + @Override + public OpenSearchRequestBuilder build() { + return null; + } + }; assertAll( () -> assertFalse(sample.pushDownFilter(mock(LogicalFilter.class))), () -> assertFalse(sample.pushDownProject(mock(LogicalProject.class))), @@ -34,9 +34,6 @@ public OpenSearchRequestBuilder build() { () -> assertFalse(sample.pushDownSort(mock(LogicalSort.class))), () -> assertFalse(sample.pushDownNested(mock(LogicalNested.class))), () -> assertFalse(sample.pushDownLimit(mock(LogicalLimit.class))), - () -> assertFalse(sample.pushDownPageSize(mock(LogicalPaginate.class))) - - ); + () -> assertFalse(sample.pushDownPageSize(mock(LogicalPaginate.class)))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java index 32c02959b8..781e27d71a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java @@ -36,76 +36,82 @@ class 
QueryStringTest { private final QueryStringQuery queryStringQuery = new QueryStringQuery(); private final FunctionName queryStringFunc = FunctionName.of("query_string"); - private static final LiteralExpression fields_value = DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F))))); + private static final LiteralExpression fields_value = + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F))))); private static final LiteralExpression query_value = DSL.literal("query_value"); static Stream> generateValidData() { Expression field = DSL.namedArgument("fields", fields_value); Expression query = DSL.namedArgument("query", query_value); return Stream.of( - DSL.namedArgument("analyzer", DSL.literal("standard")), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")), - DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), - DSL.namedArgument("boost", DSL.literal("1")), - DSL.namedArgument("default_operator", DSL.literal("AND")), - DSL.namedArgument("default_operator", DSL.literal("and")), - DSL.namedArgument("enable_position_increments", DSL.literal("true")), - DSL.namedArgument("escape", DSL.literal("false")), - DSL.namedArgument("fuzziness", DSL.literal("1")), - DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), - DSL.namedArgument("lenient", DSL.literal("true")), - DSL.namedArgument("max_determinized_states", DSL.literal("10000")), - DSL.namedArgument("minimum_should_match", DSL.literal("4")), - DSL.namedArgument("quote_analyzer", DSL.literal("standard")), - 
DSL.namedArgument("phrase_slop", DSL.literal("0")), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), - DSL.namedArgument("rewrite", DSL.literal("constant_score")), - DSL.namedArgument("type", DSL.literal("best_fields")), - DSL.namedArgument("tie_breaker", DSL.literal("0.3")), - DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), - DSL.namedArgument("ANALYZER", DSL.literal("standard")), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), - DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), - DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), - DSL.namedArgument("Boost", DSL.literal("1")) - ).map(arg -> List.of(field, query, arg)); + DSL.namedArgument("analyzer", DSL.literal("standard")), + DSL.namedArgument("analyze_wildcard", DSL.literal("true")), + DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), + DSL.namedArgument("boost", DSL.literal("1")), + DSL.namedArgument("default_operator", DSL.literal("AND")), + DSL.namedArgument("default_operator", DSL.literal("and")), + DSL.namedArgument("enable_position_increments", DSL.literal("true")), + DSL.namedArgument("escape", DSL.literal("false")), + DSL.namedArgument("fuzziness", DSL.literal("1")), + DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), + DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), + DSL.namedArgument("lenient", DSL.literal("true")), + DSL.namedArgument("max_determinized_states", DSL.literal("10000")), + DSL.namedArgument("minimum_should_match", DSL.literal("4")), + DSL.namedArgument("quote_analyzer", DSL.literal("standard")), + DSL.namedArgument("phrase_slop", DSL.literal("0")), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), + DSL.namedArgument("rewrite", 
DSL.literal("constant_score")), + DSL.namedArgument("type", DSL.literal("best_fields")), + DSL.namedArgument("tie_breaker", DSL.literal("0.3")), + DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), + DSL.namedArgument("ANALYZER", DSL.literal("standard")), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), + DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), + DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), + DSL.namedArgument("Boost", DSL.literal("1"))) + .map(arg -> List.of(field, query, arg)); } @ParameterizedTest @MethodSource("generateValidData") void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(queryStringQuery.build( - new QueryStringExpression(validArgs))); + Assertions.assertNotNull(queryStringQuery.build(new QueryStringExpression(validArgs))); } @Test void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @Test void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @Test void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @@ -124,14 +130,16 @@ public 
QueryStringExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java index a61b47b7b1..d81218c0c3 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java @@ -37,78 +37,78 @@ class QueryTest { static Stream> generateValidData() { Expression query = DSL.namedArgument("query", query_value); return List.of( - DSL.namedArgument("analyzer", DSL.literal("standard")), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")), - DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), - DSL.namedArgument("boost", DSL.literal("1")), - DSL.namedArgument("default_operator", DSL.literal("AND")), - DSL.namedArgument("default_operator", DSL.literal("and")), - DSL.namedArgument("enable_position_increments", DSL.literal("true")), - DSL.namedArgument("escape", DSL.literal("false")), - DSL.namedArgument("fuzziness", DSL.literal("1")), - 
DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), - DSL.namedArgument("lenient", DSL.literal("true")), - DSL.namedArgument("max_determinized_states", DSL.literal("10000")), - DSL.namedArgument("minimum_should_match", DSL.literal("4")), - DSL.namedArgument("quote_analyzer", DSL.literal("standard")), - DSL.namedArgument("phrase_slop", DSL.literal("0")), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), - DSL.namedArgument("rewrite", DSL.literal("constant_score")), - DSL.namedArgument("type", DSL.literal("best_fields")), - DSL.namedArgument("tie_breaker", DSL.literal("0.3")), - DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), - DSL.namedArgument("ANALYZER", DSL.literal("standard")), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), - DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), - DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), - DSL.namedArgument("Boost", DSL.literal("1")) - ).stream().map(arg -> List.of(query, arg)); + DSL.namedArgument("analyzer", DSL.literal("standard")), + DSL.namedArgument("analyze_wildcard", DSL.literal("true")), + DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), + DSL.namedArgument("boost", DSL.literal("1")), + DSL.namedArgument("default_operator", DSL.literal("AND")), + DSL.namedArgument("default_operator", DSL.literal("and")), + DSL.namedArgument("enable_position_increments", DSL.literal("true")), + DSL.namedArgument("escape", DSL.literal("false")), + DSL.namedArgument("fuzziness", DSL.literal("1")), + DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), + 
DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), + DSL.namedArgument("lenient", DSL.literal("true")), + DSL.namedArgument("max_determinized_states", DSL.literal("10000")), + DSL.namedArgument("minimum_should_match", DSL.literal("4")), + DSL.namedArgument("quote_analyzer", DSL.literal("standard")), + DSL.namedArgument("phrase_slop", DSL.literal("0")), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), + DSL.namedArgument("rewrite", DSL.literal("constant_score")), + DSL.namedArgument("type", DSL.literal("best_fields")), + DSL.namedArgument("tie_breaker", DSL.literal("0.3")), + DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), + DSL.namedArgument("ANALYZER", DSL.literal("standard")), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), + DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), + DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), + DSL.namedArgument("Boost", DSL.literal("1"))) + .stream() + .map(arg -> List.of(query, arg)); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(queryQuery.build( - new QueryExpression(validArgs))); + Assertions.assertNotNull(queryQuery.build(new QueryExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + assertThrows( + SyntaxCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SyntaxCheckException_when_field_argument() { - List arguments = List.of( - namedArgument("fields", "invalid argument"), - namedArgument("query", query_value)); - assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + List arguments = + 
List.of(namedArgument("fields", "invalid argument"), namedArgument("query", query_value)); + assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + List arguments = + List.of( + namedArgument("query", query_value), namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_sending_parameter_multiple_times() { - List arguments = List.of( + List arguments = + List.of( namedArgument("query", query_value), namedArgument("allow_leading_wildcard", DSL.literal("true")), namedArgument("allow_leading_wildcard", DSL.literal("true"))); - Assertions.assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } private NamedArgumentExpression namedArgument(String name, String value) { @@ -126,14 +126,16 @@ public QueryExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new 
UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } @@ -141,7 +143,6 @@ public ExprType type() { public void test_can_get_query_name() { List arguments = List.of(namedArgument("query", query_value)); queryQuery.build(new QueryExpression(arguments)); - Assertions.assertEquals("query", - queryQuery.getQueryName()); + Assertions.assertEquals("query", queryQuery.getQueryName()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java index 208c782593..ca87f42900 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -21,9 +20,10 @@ class RangeQueryTest { @Test void should_throw_exception_for_unsupported_comparison() { // Note that since we do switch check on enum comparison, this should be impossible - assertThrows(IllegalStateException.class, () -> - new RangeQuery(Comparison.BETWEEN) - .doBuild("name", STRING, ExprValueUtils.stringValue("John"))); + assertThrows( + IllegalStateException.class, + () -> + new RangeQuery(Comparison.BETWEEN) + .doBuild("name", STRING, ExprValueUtils.stringValue("John"))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java index f7129117a1..ea14461521 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -37,148 +36,129 @@ class SimpleQueryStringTest { private final SimpleQueryStringQuery simpleQueryStringQuery = new SimpleQueryStringQuery(); private final FunctionName simpleQueryString = FunctionName.of("simple_query_string"); - private static final LiteralExpression fields_value = DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F))))); + private static final LiteralExpression fields_value = + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F))))); private static final LiteralExpression query_value = DSL.literal("query_value"); static Stream> generateValidData() { return Stream.of( - List.of( - DSL.namedArgument("fields", fields_value), - DSL.namedArgument("query", query_value) - ), + List.of(DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value)), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")) - ), + DSL.namedArgument("analyze_wildcard", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyzer", DSL.literal("standard")) - ), + DSL.namedArgument("analyzer", DSL.literal("standard"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - 
DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")) - ), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX|NOT|AND")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX|NOT|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("NOT|AND")) - ), + DSL.namedArgument("flags", DSL.literal("NOT|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX|not|AND")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX|not|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("not|and")) - ), + DSL.namedArgument("flags", DSL.literal("not|and"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")) - ), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")) - ), + DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")) - ), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - 
DSL.namedArgument("lenient", DSL.literal("true")) - ), + DSL.namedArgument("lenient", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("default_operator", DSL.literal("AND")) - ), + DSL.namedArgument("default_operator", DSL.literal("AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("default_operator", DSL.literal("and")) - ), + DSL.namedArgument("default_operator", DSL.literal("and"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("minimum_should_match", DSL.literal("4")) - ), + DSL.namedArgument("minimum_should_match", DSL.literal("4"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")) - ), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("boost", DSL.literal("1")) - ), - List.of( - DSL.namedArgument("FIELDS", fields_value), - DSL.namedArgument("QUERY", query_value) - ), + DSL.namedArgument("boost", DSL.literal("1"))), + List.of(DSL.namedArgument("FIELDS", fields_value), DSL.namedArgument("QUERY", query_value)), List.of( DSL.namedArgument("FIELDS", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")) - ), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyZER", DSL.literal("standard")) - ) - ); + DSL.namedArgument("analyZER", DSL.literal("standard")))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - 
Assertions.assertNotNull(simpleQueryStringQuery.build( - new SimpleQueryStringExpression(validArgs))); + Assertions.assertNotNull( + simpleQueryStringQuery.build(new SimpleQueryStringExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @@ -197,14 +177,16 @@ public SimpleQueryStringExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + 
throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java index 98bd7c5784..7182626c02 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java @@ -36,46 +36,45 @@ class WildcardQueryTest { static Stream> generateValidData() { return Stream.of( List.of( - namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of())), + namedArgument("field", new ReferenceExpression("title", OpenSearchTextType.of())), namedArgument("query", "query_value*"), namedArgument("boost", "0.7"), namedArgument("case_insensitive", "false"), - namedArgument("rewrite", "constant_score_boolean") - ) - ); + namedArgument("rewrite", "constant_score_boolean"))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(wildcardQueryQuery.build( - new WildcardQueryExpression(validArgs))); + Assertions.assertNotNull(wildcardQueryQuery.build(new WildcardQueryExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { - List arguments = List.of(namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, + List arguments = + List.of(namedArgument("field", new 
ReferenceExpression("title", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of())), - namedArgument("query", "query_value*"), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("field", new ReferenceExpression("title", OpenSearchTextType.of())), + namedArgument("query", "query_value*"), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @@ -86,14 +85,16 @@ public WildcardQueryExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java index 5406f4cb58..a93a1e5fa4 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java @@ -48,12 +48,17 @@ class RelevanceQueryBuildTest { private QueryBuilder queryBuilder; private final Map> queryBuildActions = ImmutableMap.>builder() - .put("boost", (k, v) -> k.boost(Float.parseFloat(v.stringValue()))).build(); + .put("boost", (k, v) -> k.boost(Float.parseFloat(v.stringValue()))) + .build(); @BeforeEach public void setUp() { - query = mock(RelevanceQuery.class, withSettings().useConstructor(queryBuildActions) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + RelevanceQuery.class, + withSettings() + .useConstructor(queryBuildActions) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); queryBuilder = mock(QueryBuilder.class); when(query.createQueryBuilder(any())).thenReturn(queryBuilder); String queryName = "mock_query"; @@ -64,9 +69,13 @@ public void setUp() { @Test void throws_SemanticCheckException_when_same_argument_twice() { - FunctionExpression expr = createCall(List.of(FIELD_ARG, QUERY_ARG, - namedArgument("boost", "2.3"), - namedArgument("boost", "2.4"))); + FunctionExpression expr = + createCall( + List.of( + FIELD_ARG, + QUERY_ARG, + namedArgument("boost", "2.3"), + namedArgument("boost", "2.4"))); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> query.build(expr)); assertEquals("Parameter 'boost' can only be specified once.", exception.getMessage()); @@ -79,8 +88,7 @@ void throws_SemanticCheckException_when_wrong_argument_name() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> query.build(expr)); - assertEquals("Parameter wrongarg is invalid for mock_query function.", - exception.getMessage()); + assertEquals("Parameter wrongarg is invalid for mock_query function.", exception.getMessage()); } @Test @@ 
-95,14 +103,13 @@ void calls_action_when_correct_argument_name() { @ParameterizedTest @MethodSource("insufficientArguments") public void throws_SyntaxCheckException_when_no_required_arguments(List arguments) { - SyntaxCheckException exception = assertThrows(SyntaxCheckException.class, - () -> query.build(createCall(arguments))); + SyntaxCheckException exception = + assertThrows(SyntaxCheckException.class, () -> query.build(createCall(arguments))); assertEquals("mock_query requires at least two parameters", exception.getMessage()); } public static Stream> insufficientArguments() { - return Stream.of(List.of(), - List.of(namedArgument("field", "field_A"))); + return Stream.of(List.of(), List.of(namedArgument("field", "field_A"))); } private static NamedArgumentExpression namedArgument(String field, String fieldValue) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java index 3628dc8abc..7234ee9275 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java @@ -26,14 +26,17 @@ class SingleFieldQueryTest { SingleFieldQuery query; private final String testQueryName = "test_query"; - private final Map actionMap - = ImmutableMap.of("paramA", (o, v) -> o); + private final Map actionMap = + ImmutableMap.of("paramA", (o, v) -> o); @BeforeEach void setUp() { - query = mock(SingleFieldQuery.class, - Mockito.withSettings().useConstructor(actionMap) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + SingleFieldQuery.class, + Mockito.withSettings() + .useConstructor(actionMap) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); 
when(query.getQueryName()).thenReturn(testQueryName); } @@ -42,15 +45,20 @@ void createQueryBuilderTestTypeTextKeyword() { String sampleQuery = "sample query"; String sampleField = "fieldA"; - query.createQueryBuilder(List.of(DSL.namedArgument("field", - new ReferenceExpression(sampleField, - OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))))), - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "field", + new ReferenceExpression( + sampleField, + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))))), + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); - verify(query).createBuilder(eq(sampleField), - eq(sampleQuery)); + verify(query).createBuilder(eq(sampleField), eq(sampleQuery)); } @Test @@ -58,12 +66,13 @@ void createQueryBuilderTestTypeText() { String sampleQuery = "sample query"; String sampleField = "fieldA"; - query.createQueryBuilder(List.of(DSL.namedArgument("field", - new ReferenceExpression(sampleField, OpenSearchTextType.of())), - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "field", new ReferenceExpression(sampleField, OpenSearchTextType.of())), + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); - verify(query).createBuilder(eq(sampleField), - eq(sampleQuery)); + verify(query).createBuilder(eq(sampleField), eq(sampleQuery)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java index e84ed14e43..89a10ad563 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.sort; import static org.hamcrest.MatcherAssert.assertThat; @@ -32,10 +31,7 @@ void build_sortbuilder_from_reference() { void build_sortbuilder_from_nested_function() { assertNotNull( sortQueryBuilder.build( - DSL.nested(DSL.ref("message.info", STRING)), - Sort.SortOption.DEFAULT_ASC - ) - ); + DSL.nested(DSL.ref("message.info", STRING)), Sort.SortOption.DEFAULT_ASC)); } @Test @@ -43,63 +39,56 @@ void build_sortbuilder_from_nested_function_with_path_param() { assertNotNull( sortQueryBuilder.build( DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)), - Sort.SortOption.DEFAULT_ASC - ) - ); + Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_too_many_args_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested( - DSL.ref("message.info", STRING), - DSL.ref("message", STRING), - DSL.ref("message", STRING) - ), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> + sortQueryBuilder.build( + DSL.nested( + DSL.ref("message.info", STRING), + DSL.ref("message", STRING), + DSL.ref("message", STRING)), + Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_too_few_args_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested(), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> sortQueryBuilder.build(DSL.nested(), Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_invalid_arg_type_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested( - DSL.literal(1) - ), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> sortQueryBuilder.build(DSL.nested(DSL.literal(1)), 
Sort.SortOption.DEFAULT_ASC)); } @Test void build_sortbuilder_from_expression_should_throw_exception() { final IllegalStateException exception = - assertThrows(IllegalStateException.class, () -> sortQueryBuilder.build( - new LiteralExpression(new ExprShortValue(1)), Sort.SortOption.DEFAULT_ASC)); + assertThrows( + IllegalStateException.class, + () -> + sortQueryBuilder.build( + new LiteralExpression(new ExprShortValue(1)), Sort.SortOption.DEFAULT_ASC)); assertThat(exception.getMessage(), Matchers.containsString("unsupported expression")); } @Test void build_sortbuilder_from_function_should_throw_exception() { final IllegalStateException exception = - assertThrows(IllegalStateException.class, () -> sortQueryBuilder.build(DSL.equal(DSL.ref( - "intV", INTEGER), DSL.literal(1)), Sort.SortOption.DEFAULT_ASC)); + assertThrows( + IllegalStateException.class, + () -> + sortQueryBuilder.build( + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(1)), + Sort.SortOption.DEFAULT_ASC)); assertThat(exception.getMessage(), Matchers.containsString("unsupported expression")); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java index 494f3ff2d0..00d1c9ecd1 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -21,8 +20,7 @@ @ExtendWith(MockitoExtension.class) class OpenSearchSystemIndexScanTest { - @Mock - private OpenSearchSystemRequest request; + @Mock private OpenSearchSystemRequest request; @Test public void queryData() { diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java index a483f2dad8..1afcfcdc86 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.hamcrest.MatcherAssert.assertThat; @@ -35,29 +34,23 @@ @ExtendWith(MockitoExtension.class) class OpenSearchSystemIndexTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private Table table; + @Mock private Table table; @Test void testGetFieldTypesOfMetaTable() { OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("TABLE_CAT", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("TABLE_CAT", STRING))); } @Test void testGetFieldTypesOfMappingTable() { - OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, mappingTable( - "test_index")); + OpenSearchSystemIndex systemIndex = + new OpenSearchSystemIndex(client, mappingTable("test_index")); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("COLUMN_NAME", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("COLUMN_NAME", STRING))); } @Test @@ -69,8 +62,7 @@ void testIsExist() { @Test void testCreateTable() { Table systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO); - assertThrows(UnsupportedOperationException.class, - () -> systemIndex.create(ImmutableMap.of())); + assertThrows(UnsupportedOperationException.class, () -> systemIndex.create(ImmutableMap.of())); } @Test @@ -78,11 +70,8 @@ void 
implement() { OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO); NamedExpression projectExpr = named("TABLE_NAME", ref("TABLE_NAME", STRING)); - final PhysicalPlan plan = systemIndex.implement( - project( - relation(TABLE_INFO, table), - projectExpr - )); + final PhysicalPlan plan = + systemIndex.implement(project(relation(TABLE_INFO, table), projectExpr)); assertTrue(plan instanceof ProjectOperator); assertTrue(plan.getChild().get(0) instanceof OpenSearchSystemIndexScan); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java index 85b8889de3..0db87f89d4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.utils; import com.google.common.collect.ImmutableSet; @@ -36,15 +35,13 @@ public static List group(NamedExpression... 
exprs) { return Arrays.asList(exprs); } - public static List> sort(Expression expr1, - Sort.SortOption option1) { + public static List> sort( + Expression expr1, Sort.SortOption option1) { return Collections.singletonList(Pair.of(option1, expr1)); } - public static List> sort(Expression expr1, - Sort.SortOption option1, - Expression expr2, - Sort.SortOption option2) { + public static List> sort( + Expression expr1, Sort.SortOption option1, Expression expr2, Sort.SortOption option2) { return Arrays.asList(Pair.of(option1, expr1), Pair.of(option2, expr2)); } From ff059f0511fb86a9fb9327e67d42a25ca28c6b5b Mon Sep 17 00:00:00 2001 From: Yury-Fridlyand Date: Fri, 18 Aug 2023 15:13:03 -0700 Subject: [PATCH 2/6] Fix doctest data (#1902) Signed-off-by: Yury-Fridlyand --- doctest/test_data/nested_objects.json | 2 -- doctest/test_data/wildcard.json | 11 ----------- 2 files changed, 13 deletions(-) diff --git a/doctest/test_data/nested_objects.json b/doctest/test_data/nested_objects.json index fc5f56b4c5..bee976806e 100644 --- a/doctest/test_data/nested_objects.json +++ b/doctest/test_data/nested_objects.json @@ -1,4 +1,2 @@ -{"index":{"_id":"1"}} {"message":{"info":"a","author":"e","dayOfWeek":1},"comment":{"data":"ab","likes":3},"myNum":1,"someField":"b"} -{"index":{"_id":"2"}} {"message":{"info":"b","author":"f","dayOfWeek":2},"comment":{"data":"aa","likes":2},"myNum":2,"someField":"a"} diff --git a/doctest/test_data/wildcard.json b/doctest/test_data/wildcard.json index c91778d8ab..21256ed8dc 100644 --- a/doctest/test_data/wildcard.json +++ b/doctest/test_data/wildcard.json @@ -1,22 +1,11 @@ -{"index":{"_id":"0"}} {"Body":"test wildcard"} -{"index":{"_id":"1"}} {"Body":"test wildcard in the end of the text%"} -{"index":{"_id":"2"}} {"Body":"%test wildcard in the beginning of the text"} -{"index":{"_id":"3"}} {"Body":"test wildcard in % the middle of the text"} -{"index":{"_id":"4"}} {"Body":"test wildcard %% beside each other"} -{"index":{"_id":"5"}} {"Body":"test wildcard 
in the end of the text_"} -{"index":{"_id":"6"}} {"Body":"_test wildcard in the beginning of the text"} -{"index":{"_id":"7"}} {"Body":"test wildcard in _ the middle of the text"} -{"index":{"_id":"8"}} {"Body":"test wildcard __ beside each other"} -{"index":{"_id":"9"}} {"Body":"test backslash wildcard \\_"} -{"index":{"_id":"10"}} {"Body":"tEsT wIlDcArD sensitive cases"} From ac9b5d86aa18d39356740485c766d8206ef8b43e Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Fri, 18 Aug 2023 18:38:27 -0700 Subject: [PATCH 3/6] [Spotless] Applying Google Code Format for legacy directory (pt 1/4) #19 (#1988) * spotless apply for OpenSearch P1. Signed-off-by: Mitchell Gale * Manual spotless changes Signed-off-by: Mitchell Gale * spotless apply for OpenSearch P2. Signed-off-by: Mitchell Gale * 90 files checked after spotless apply for legacy Signed-off-by: Mitchell Gale * Added checkstyle ignore failures to legacy Signed-off-by: Mitchell Gale * Fixed comma issue Signed-off-by: Mitchell Gale * Spotless apply Signed-off-by: Mitchell Gale * Revert build.gradle Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale --- .../ComparisonOperatorBenchmark.java | 6 +- legacy/build.gradle | 3 + .../antlr/semantic/types/base/BaseType.java | 21 +- .../types/function/AggregateFunction.java | 70 +- .../types/operator/ComparisonOperator.java | 88 +- .../syntax/CaseInsensitiveCharStream.java | 92 +- .../visitor/AntlrSqlParseTreeVisitor.java | 667 ++++---- .../visitor/EarlyExitAnalysisException.java | 11 +- .../opensearch/sql/legacy/cursor/Cursor.java | 8 +- .../sql/legacy/cursor/CursorType.java | 45 +- .../sql/legacy/cursor/DefaultCursor.java | 249 +-- .../sql/legacy/domain/ColumnTypeProvider.java | 103 +- .../sql/legacy/domain/Condition.java | 721 +++++---- .../opensearch/sql/legacy/domain/Delete.java | 9 +- .../legacy/domain/bucketpath/BucketPath.java | 40 +- .../legacy/executor/AsyncRestExecutor.java | 253 +-- 
.../executor/ElasticDefaultRestExecutor.java | 151 +- .../legacy/executor/ElasticHitsExecutor.java | 9 +- .../legacy/executor/ElasticResultHandler.java | 44 +- .../sql/legacy/executor/csv/CSVResult.java | 129 +- .../executor/csv/CSVResultRestExecutor.java | 89 +- .../executor/csv/CSVResultsExtractor.java | 583 +++---- .../executor/csv/CsvExtractorException.java | 11 +- ...ursorActionRequestRestExecutorFactory.java | 20 +- .../cursor/CursorAsyncRestExecutor.java | 140 +- .../executor/cursor/CursorCloseExecutor.java | 116 +- .../executor/cursor/CursorRestExecutor.java | 11 +- .../executor/cursor/CursorResultExecutor.java | 157 +- .../format/BindingTupleResultSet.java | 76 +- .../sql/legacy/executor/format/DataRows.java | 127 +- .../executor/format/DateFieldFormatter.java | 305 ++-- .../legacy/executor/format/DateFormat.java | 226 +-- .../executor/format/DeleteResultSet.java | 49 +- .../executor/format/DescribeResultSet.java | 264 ++- .../executor/join/ElasticJoinExecutor.java | 421 ++--- .../executor/multi/ComperableHitResult.java | 109 +- .../builder/ArithmeticFunctionFactory.java | 326 ++-- .../core/builder/BinaryExpressionBuilder.java | 48 +- .../core/operator/BinaryScalarOperator.java | 86 +- .../operator/DoubleBinaryScalarOperator.java | 59 +- .../operator/DoubleUnaryScalarOperator.java | 43 +- .../expression/domain/BindingTuple.java | 67 +- .../sql/legacy/metrics/BasicCounter.java | 35 +- .../sql/legacy/metrics/Counter.java | 9 +- .../sql/legacy/parser/CaseWhenParser.java | 191 ++- .../sql/legacy/parser/ChildrenType.java | 84 +- .../sql/legacy/parser/ElasticLexer.java | 139 +- .../legacy/query/AggregationQueryAction.java | 776 ++++----- .../sql/legacy/query/DefaultQueryAction.java | 479 +++--- .../sql/legacy/query/DeleteQueryAction.java | 86 +- .../sql/legacy/query/DescribeQueryAction.java | 30 +- .../query/join/BackOffRetryStrategy.java | 311 ++-- .../sql/legacy/query/maker/AggMaker.java | 1437 +++++++++-------- .../core/BindingTupleQueryPlanner.java | 99 +- 
.../legacy/query/planner/core/ColumnNode.java | 19 +- .../sql/legacy/query/planner/core/Config.java | 224 ++- .../planner/physical/estimation/Cost.java | 18 +- .../physical/node/BatchPhysicalOperator.java | 119 +- .../physical/node/join/BlockHashJoin.java | 147 +- .../physical/node/join/CombinedRow.java | 39 +- .../physical/node/join/DefaultHashTable.java | 173 +- .../physical/node/scroll/BindingTupleRow.java | 43 +- .../planner/resource/blocksize/BlockSize.java | 49 +- .../AnonymizeSensitiveDataRule.java | 81 +- .../spatial/BoundingBoxFilterParams.java | 29 +- .../sql/legacy/spatial/CellFilterParams.java | 55 +- .../legacy/spatial/DistanceFilterParams.java | 29 +- .../antlr/semantic/types/BaseTypeTest.java | 142 +- .../visitor/AntlrSqlParseTreeVisitorTest.java | 167 +- .../executor/AsyncRestExecutorTest.java | 103 +- .../legacy/executor/csv/CSVResultTest.java | 173 +- .../format/DateFieldFormatterTest.java | 1312 ++++++++------- .../unittest/AggregationOptionTest.java | 79 +- .../sql/legacy/unittest/DateFormatTest.java | 419 ++--- .../legacy/unittest/DateFunctionsTest.java | 306 ++-- .../unittest/cursor/DefaultCursorTest.java | 72 +- .../domain/ColumnTypeProviderTest.java | 50 +- .../executor/DeleteResultSetTest.java | 82 +- .../format/BindingTupleResultSetTest.java | 69 +- .../format/CSVResultsExtractorTest.java | 39 +- .../expression/core/BinaryExpressionTest.java | 127 +- .../core/CompoundExpressionTest.java | 15 +- .../unittest/metrics/BasicCounterTest.java | 28 +- .../unittest/parser/BucketPathTest.java | 66 +- .../BindingTupleQueryPlannerExecuteTest.java | 130 +- .../query/DefaultQueryActionTest.java | 378 +++-- .../rewriter/inline/AliasInliningTests.java | 205 +-- .../unittest/utils/BackticksUnquoterTest.java | 44 +- .../sql/legacy/util/AggregationUtils.java | 83 +- .../sql/legacy/util/CheckScriptContents.java | 356 ++-- .../data/type/OpenSearchTextType.java | 2 +- .../value/OpenSearchExprValueFactoryTest.java | 2 +- 92 files changed, 7517 insertions(+), 
7685 deletions(-) diff --git a/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java b/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java index d2642dd645..01b2068694 100644 --- a/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java +++ b/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java @@ -38,7 +38,7 @@ @Fork(value = 1) public class ComparisonOperatorBenchmark { - @Param(value = { "int", "string", "date" }) + @Param(value = {"int", "string", "date"}) private String testDataType; private final Map params = @@ -65,9 +65,7 @@ public void testGreaterOperator() { private void run(Function dsl) { ExprValue param = params.get(testDataType); - FunctionExpression func = dsl.apply(new Expression[] { - literal(param), literal(param) - }); + FunctionExpression func = dsl.apply(new Expression[] {literal(param), literal(param)}); func.valueOf(); } } diff --git a/legacy/build.gradle b/legacy/build.gradle index d89f7affe7..fce04ae9ba 100644 --- a/legacy/build.gradle +++ b/legacy/build.gradle @@ -53,6 +53,9 @@ compileJava { } } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + // TODO: Similarly, need to fix compiling errors in test source code compileTestJava.options.warnings = false compileTestJava { diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java index 280b7b4c76..37e0c4d4b3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java @@ -3,24 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.base; import 
java.util.List; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Base type interface - */ +/** Base type interface */ public interface BaseType extends Type { - @Override - default Type construct(List others) { - return this; - } + @Override + default Type construct(List others) { + return this; + } - @Override - default String usage() { - return getName(); - } + @Override + default String usage() { + return getName(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java index 37e4091b0a..9cebf3dda6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.function; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DOUBLE; @@ -15,41 +14,38 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.TypeExpression; -/** - * Aggregate function - */ +/** Aggregate function */ public enum AggregateFunction implements TypeExpression { - COUNT( - func().to(INTEGER), // COUNT(*) - func(OPENSEARCH_TYPE).to(INTEGER) - ), - MAX(func(T(NUMBER)).to(T)), - MIN(func(T(NUMBER)).to(T)), - AVG(func(T(NUMBER)).to(DOUBLE)), - SUM(func(T(NUMBER)).to(T)); - - private TypeExpressionSpec[] specifications; - - AggregateFunction(TypeExpressionSpec... specifications) { - this.specifications = specifications; - } - - @Override - public String getName() { - return name(); - } - - @Override - public TypeExpressionSpec[] specifications() { - return specifications; - } - - private static TypeExpressionSpec func(Type... 
argTypes) { - return new TypeExpressionSpec().map(argTypes); - } - - @Override - public String toString() { - return "Function [" + name() + "]"; - } + COUNT( + func().to(INTEGER), // COUNT(*) + func(OPENSEARCH_TYPE).to(INTEGER)), + MAX(func(T(NUMBER)).to(T)), + MIN(func(T(NUMBER)).to(T)), + AVG(func(T(NUMBER)).to(DOUBLE)), + SUM(func(T(NUMBER)).to(T)); + + private TypeExpressionSpec[] specifications; + + AggregateFunction(TypeExpressionSpec... specifications) { + this.specifications = specifications; + } + + @Override + public String getName() { + return name(); + } + + @Override + public TypeExpressionSpec[] specifications() { + return specifications; + } + + private static TypeExpressionSpec func(Type... argTypes) { + return new TypeExpressionSpec().map(argTypes); + } + + @Override + public String toString() { + return "Function [" + name() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java index 993d996df3..19e8f85aa3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.operator; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; @@ -12,53 +11,50 @@ import java.util.List; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Type for comparison operator - */ +/** Type for comparison operator */ public enum ComparisonOperator implements Type { - - EQUAL("="), - NOT_EQUAL("<>"), - NOT_EQUAL2("!="), - GREATER_THAN(">"), - GREATER_THAN_OR_EQUAL_TO(">="), - SMALLER_THAN("<"), - SMALLER_THAN_OR_EQUAL_TO("<="), - IS("IS"); - - /** Actual name representing the 
operator */ - private final String name; - - ComparisonOperator(String name) { - this.name = name; - } - - @Override - public String getName() { - return name; - } - - @Override - public Type construct(List actualArgs) { - if (actualArgs.size() != 2) { - return TYPE_ERROR; - } - - Type leftType = actualArgs.get(0); - Type rightType = actualArgs.get(1); - if (leftType.isCompatible(rightType) || rightType.isCompatible(leftType)) { - return BOOLEAN; - } - return TYPE_ERROR; - } - - @Override - public String usage() { - return "Please use compatible types from each side."; + EQUAL("="), + NOT_EQUAL("<>"), + NOT_EQUAL2("!="), + GREATER_THAN(">"), + GREATER_THAN_OR_EQUAL_TO(">="), + SMALLER_THAN("<"), + SMALLER_THAN_OR_EQUAL_TO("<="), + IS("IS"); + + /** Actual name representing the operator */ + private final String name; + + ComparisonOperator(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + @Override + public Type construct(List actualArgs) { + if (actualArgs.size() != 2) { + return TYPE_ERROR; } - @Override - public String toString() { - return "Operator [" + getName() + "]"; + Type leftType = actualArgs.get(0); + Type rightType = actualArgs.get(1); + if (leftType.isCompatible(rightType) || rightType.isCompatible(leftType)) { + return BOOLEAN; } + return TYPE_ERROR; + } + + @Override + public String usage() { + return "Please use compatible types from each side."; + } + + @Override + public String toString() { + return "Operator [" + getName() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java index de7e60e9f3..c7cb212826 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.syntax; import org.antlr.v4.runtime.CharStream; @@ -11,63 +10,64 @@ import org.antlr.v4.runtime.misc.Interval; /** - * Custom stream to convert character to upper case for case insensitive grammar before sending to lexer. + * Custom stream to convert character to upper case for case insensitive grammar before sending to + * lexer. */ public class CaseInsensitiveCharStream implements CharStream { - /** Character stream */ - private final CharStream charStream; + /** Character stream */ + private final CharStream charStream; - public CaseInsensitiveCharStream(String sql) { - this.charStream = CharStreams.fromString(sql); - } + public CaseInsensitiveCharStream(String sql) { + this.charStream = CharStreams.fromString(sql); + } - @Override - public String getText(Interval interval) { - return charStream.getText(interval); - } + @Override + public String getText(Interval interval) { + return charStream.getText(interval); + } - @Override - public void consume() { - charStream.consume(); - } + @Override + public void consume() { + charStream.consume(); + } - @Override - public int LA(int i) { - int c = charStream.LA(i); - if (c <= 0) { - return c; - } - return Character.toUpperCase(c); + @Override + public int LA(int i) { + int c = charStream.LA(i); + if (c <= 0) { + return c; } + return Character.toUpperCase(c); + } - @Override - public int mark() { - return charStream.mark(); - } + @Override + public int mark() { + return charStream.mark(); + } - @Override - public void release(int marker) { - charStream.release(marker); - } + @Override + public void release(int marker) { + charStream.release(marker); + } - @Override - public int index() { - return charStream.index(); - } + @Override + public int index() { + return charStream.index(); + } - @Override - public void seek(int index) { - charStream.seek(index); - } + @Override + public void seek(int index) { + charStream.seek(index); + } - 
@Override - public int size() { - return charStream.size(); - } + @Override + public int size() { + return charStream.size(); + } - @Override - public String getSourceName() { - return charStream.getSourceName(); - } + @Override + public String getSourceName() { + return charStream.getSourceName(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java index 90a8274568..00db9a6591 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import static java.util.Collections.emptyList; @@ -55,78 +54,74 @@ import org.opensearch.sql.legacy.antlr.parser.OpenSearchLegacySqlParser.TableNamePatternContext; import org.opensearch.sql.legacy.antlr.parser.OpenSearchLegacySqlParserBaseVisitor; -/** - * ANTLR parse tree visitor to drive the analysis process. 
- */ -public class AntlrSqlParseTreeVisitor extends OpenSearchLegacySqlParserBaseVisitor { - - /** Generic visitor to perform the real action on parse tree */ - private final GenericSqlParseTreeVisitor visitor; - - public AntlrSqlParseTreeVisitor(GenericSqlParseTreeVisitor visitor) { - this.visitor = visitor; - } - - @Override - public T visitRoot(RootContext ctx) { - visitor.visitRoot(); - return super.visitRoot(ctx); - } - - @Override - public T visitUnionSelect(UnionSelectContext ctx) { - T union = visitor.visitOperator("UNION"); - return reduce(union, - asList( - ctx.querySpecification(), - ctx.unionStatement() - ) - ); - } - - @Override - public T visitMinusSelect(MinusSelectContext ctx) { - T minus = visitor.visitOperator("MINUS"); - return reduce(minus, asList(ctx.querySpecification(), ctx.minusStatement())); - } - - @Override - public T visitInPredicate(InPredicateContext ctx) { - T in = visitor.visitOperator("IN"); - PredicateContext field = ctx.predicate(); - ParserRuleContext subquery = (ctx.selectStatement() != null) ? ctx.selectStatement() : ctx.expressions(); - return reduce(in, Arrays.asList(field, subquery)); - } - - @Override - public T visitTableSources(TableSourcesContext ctx) { - if (ctx.tableSource().size() < 2) { - return super.visitTableSources(ctx); - } - T commaJoin = visitor.visitOperator("JOIN"); - return reduce(commaJoin, ctx.tableSource()); - } - - @Override - public T visitTableSourceBase(TableSourceBaseContext ctx) { - if (ctx.joinPart().isEmpty()) { - return super.visitTableSourceBase(ctx); - } - T join = visitor.visitOperator("JOIN"); - return reduce(join, asList(ctx.tableSourceItem(), ctx.joinPart())); - } - - @Override - public T visitInnerJoin(InnerJoinContext ctx) { - return visitJoin(ctx.children, ctx.tableSourceItem()); - } - - @Override - public T visitOuterJoin(OuterJoinContext ctx) { - return visitJoin(ctx.children, ctx.tableSourceItem()); - } +/** ANTLR parse tree visitor to drive the analysis process. 
*/ +public class AntlrSqlParseTreeVisitor + extends OpenSearchLegacySqlParserBaseVisitor { + + /** Generic visitor to perform the real action on parse tree */ + private final GenericSqlParseTreeVisitor visitor; + + public AntlrSqlParseTreeVisitor(GenericSqlParseTreeVisitor visitor) { + this.visitor = visitor; + } + + @Override + public T visitRoot(RootContext ctx) { + visitor.visitRoot(); + return super.visitRoot(ctx); + } + + @Override + public T visitUnionSelect(UnionSelectContext ctx) { + T union = visitor.visitOperator("UNION"); + return reduce(union, asList(ctx.querySpecification(), ctx.unionStatement())); + } + + @Override + public T visitMinusSelect(MinusSelectContext ctx) { + T minus = visitor.visitOperator("MINUS"); + return reduce(minus, asList(ctx.querySpecification(), ctx.minusStatement())); + } + + @Override + public T visitInPredicate(InPredicateContext ctx) { + T in = visitor.visitOperator("IN"); + PredicateContext field = ctx.predicate(); + ParserRuleContext subquery = + (ctx.selectStatement() != null) ? ctx.selectStatement() : ctx.expressions(); + return reduce(in, Arrays.asList(field, subquery)); + } + + @Override + public T visitTableSources(TableSourcesContext ctx) { + if (ctx.tableSource().size() < 2) { + return super.visitTableSources(ctx); + } + T commaJoin = visitor.visitOperator("JOIN"); + return reduce(commaJoin, ctx.tableSource()); + } + + @Override + public T visitTableSourceBase(TableSourceBaseContext ctx) { + if (ctx.joinPart().isEmpty()) { + return super.visitTableSourceBase(ctx); + } + T join = visitor.visitOperator("JOIN"); + return reduce(join, asList(ctx.tableSourceItem(), ctx.joinPart())); + } + + @Override + public T visitInnerJoin(InnerJoinContext ctx) { + return visitJoin(ctx.children, ctx.tableSourceItem()); + } + + @Override + public T visitOuterJoin(OuterJoinContext ctx) { + return visitJoin(ctx.children, ctx.tableSourceItem()); + } /** + *
      * Enforce visit order because ANTLR is generic and unaware.
      *
      * Visiting order is:
@@ -137,275 +132,273 @@ public T visitOuterJoin(OuterJoinContext ctx) {
      *     => HAVING
      *      => ORDER BY
      *       => LIMIT
+     *  
*/ @Override public T visitQuerySpecification(QuerySpecificationContext ctx) { visitor.visitQuery(); - // Always visit FROM clause first to define symbols - FromClauseContext fromClause = ctx.fromClause(); - visit(fromClause.tableSources()); - - if (fromClause.whereExpr != null) { - visit(fromClause.whereExpr); - } - - // Note visit GROUP BY and HAVING later than SELECT for alias definition - T result = visitSelectElements(ctx.selectElements()); - fromClause.groupByItem().forEach(this::visit); - if (fromClause.havingExpr != null) { - visit(fromClause.havingExpr); - } - - if (ctx.orderByClause() != null) { - visitOrderByClause(ctx.orderByClause()); - } - if (ctx.limitClause() != null) { - visitLimitClause(ctx.limitClause()); - } - - visitor.endVisitQuery(); - return result; - } - - @Override - public T visitSubqueryTableItem(SubqueryTableItemContext ctx) { - throw new EarlyExitAnalysisException("Exit when meeting subquery in from"); - } - - /** Visit here instead of tableName because we need alias */ - @Override - public T visitAtomTableItem(AtomTableItemContext ctx) { - String alias = (ctx.alias == null) ? 
"" : ctx.alias.getText(); - T result = visit(ctx.tableName()); - visitor.visitAs(alias, result); - return result; - } - - @Override - public T visitSimpleTableName(SimpleTableNameContext ctx) { - return visitor.visitIndexName(ctx.getText()); - } - - @Override - public T visitTableNamePattern(TableNamePatternContext ctx) { - return visitor.visitIndexName(ctx.getText()); - } - - @Override - public T visitTableAndTypeName(TableAndTypeNameContext ctx) { - return visitor.visitIndexName(ctx.uid(0).getText()); - } - - @Override - public T visitFullColumnName(FullColumnNameContext ctx) { - return visitor.visitFieldName(ctx.getText()); - } - - @Override - public T visitUdfFunctionCall(UdfFunctionCallContext ctx) { - String funcName = ctx.fullId().getText(); - T func = visitor.visitFunctionName(funcName); - return reduce(func, ctx.functionArgs()); - } - - @Override - public T visitScalarFunctionCall(ScalarFunctionCallContext ctx) { - UnsupportedSemanticVerifier.verify(ctx); - T func = visit(ctx.scalarFunctionName()); - return reduce(func, ctx.functionArgs()); - } - - @Override - public T visitMathOperator(MathOperatorContext ctx) { - UnsupportedSemanticVerifier.verify(ctx); - return super.visitMathOperator(ctx); - } - - @Override - public T visitRegexpPredicate(RegexpPredicateContext ctx) { - UnsupportedSemanticVerifier.verify(ctx); - return super.visitRegexpPredicate(ctx); - } - - @Override - public T visitSelectElements(SelectElementsContext ctx) { - return visitor.visitSelect(ctx.selectElement(). - stream(). - map(this::visit). 
- collect(Collectors.toList())); - } - - @Override - public T visitSelectStarElement(OpenSearchLegacySqlParser.SelectStarElementContext ctx) { - return visitor.visitSelectAllColumn(); - } - - @Override - public T visitSelectColumnElement(SelectColumnElementContext ctx) { - return visitSelectItem(ctx.fullColumnName(), ctx.uid()); - } - - @Override - public T visitSelectFunctionElement(SelectFunctionElementContext ctx) { - return visitSelectItem(ctx.functionCall(), ctx.uid()); - } - - @Override - public T visitSelectExpressionElement(SelectExpressionElementContext ctx) { - return visitSelectItem(ctx.expression(), ctx.uid()); - } - - @Override - public T visitAggregateWindowedFunction(AggregateWindowedFunctionContext ctx) { - String funcName = ctx.getChild(0).getText(); - T func = visitor.visitFunctionName(funcName); - return reduce(func, ctx.functionArg()); - } - - @Override - public T visitFunctionNameBase(FunctionNameBaseContext ctx) { - return visitor.visitFunctionName(ctx.getText()); - } - - @Override - public T visitBinaryComparisonPredicate(BinaryComparisonPredicateContext ctx) { - if (isNamedArgument(ctx)) { // Essentially named argument is assign instead of comparison - return defaultResult(); - } - - T op = visit(ctx.comparisonOperator()); - return reduce(op, Arrays.asList(ctx.left, ctx.right)); - } - - @Override - public T visitIsExpression(IsExpressionContext ctx) { - T op = visitor.visitOperator("IS"); - return op.reduce(Arrays.asList( - visit(ctx.predicate()), - visitor.visitBoolean(ctx.testValue.getText())) - ); - } - - @Override - public T visitConvertedDataType(OpenSearchLegacySqlParser.ConvertedDataTypeContext ctx) { - if (ctx.getChild(0) != null && !Strings.isNullOrEmpty(ctx.getChild(0).getText())) { - return visitor.visitConvertedType(ctx.getChild(0).getText()); - } else { - return super.visitConvertedDataType(ctx); - } - } - - @Override - public T visitComparisonOperator(ComparisonOperatorContext ctx) { - return 
visitor.visitOperator(ctx.getText()); - } - - @Override - public T visitConstant(ConstantContext ctx) { - if (ctx.REAL_LITERAL() != null) { - return visitor.visitFloat(ctx.getText()); - } - if (ctx.dateType != null) { - return visitor.visitDate(ctx.getText()); - } - if (ctx.nullLiteral != null) { - return visitor.visitNull(); - } - return super.visitConstant(ctx); - } - - @Override - public T visitStringLiteral(StringLiteralContext ctx) { - return visitor.visitString(ctx.getText()); - } - - @Override - public T visitDecimalLiteral(DecimalLiteralContext ctx) { - return visitor.visitInteger(ctx.getText()); - } - - @Override - public T visitBooleanLiteral(BooleanLiteralContext ctx) { - return visitor.visitBoolean(ctx.getText()); - } - - @Override - protected T defaultResult() { - return visitor.defaultValue(); - } - - @Override - protected T aggregateResult(T aggregate, T nextResult) { - if (nextResult != defaultResult()) { // Simply return non-default value for now - return nextResult; - } - return aggregate; - } - - /** - * Named argument, ex. 
TOPHITS('size'=3), is under FunctionArgs -> Predicate - * And the function name should be contained in openSearchFunctionNameBase - */ - private boolean isNamedArgument(BinaryComparisonPredicateContext ctx) { - if (ctx.getParent() != null && ctx.getParent().getParent() != null - && ctx.getParent().getParent().getParent() != null - && ctx.getParent().getParent().getParent() instanceof ScalarFunctionCallContext) { - - ScalarFunctionCallContext parent = (ScalarFunctionCallContext) ctx.getParent().getParent().getParent(); - return parent.scalarFunctionName().functionNameBase().openSearchFunctionNameBase() != null; - } - return false; - } - - /** Enforce visiting result of table instead of ON clause as result */ - private T visitJoin(List children, TableSourceItemContext tableCtx) { - T result = defaultResult(); - for (ParseTree child : children) { - if (child == tableCtx) { - result = visit(tableCtx); - } else { - visit(child); - } - } - return result; - } - - /** Visit select items for type check and alias definition */ - private T visitSelectItem(ParserRuleContext item, UidContext uid) { - T result = visit(item); - if (uid != null) { - visitor.visitAs(uid.getText(), result); - } - return result; - } - - private T reduce(T reducer, ParserRuleContext ctx) { - return reduce(reducer, (ctx == null) ? emptyList() : ctx.children); - } - - /** Make constructor apply arguments and return result type */ - private T reduce(T reducer, List nodes) { - List args; - if (nodes == null) { - args = emptyList(); - } else { - args = nodes.stream(). - map(this::visit). - filter(type -> type != defaultResult()). 
- collect(Collectors.toList()); - } - return reducer.reduce(args); - } - - /** Combine an item and a list of items to a single list */ - private - List asList(Node1 first, List rest) { - - List result = new ArrayList<>(singleton(first)); - result.addAll(rest); - return result; - } - + // Always visit FROM clause first to define symbols + FromClauseContext fromClause = ctx.fromClause(); + visit(fromClause.tableSources()); + + if (fromClause.whereExpr != null) { + visit(fromClause.whereExpr); + } + + // Note visit GROUP BY and HAVING later than SELECT for alias definition + T result = visitSelectElements(ctx.selectElements()); + fromClause.groupByItem().forEach(this::visit); + if (fromClause.havingExpr != null) { + visit(fromClause.havingExpr); + } + + if (ctx.orderByClause() != null) { + visitOrderByClause(ctx.orderByClause()); + } + if (ctx.limitClause() != null) { + visitLimitClause(ctx.limitClause()); + } + + visitor.endVisitQuery(); + return result; + } + + @Override + public T visitSubqueryTableItem(SubqueryTableItemContext ctx) { + throw new EarlyExitAnalysisException("Exit when meeting subquery in from"); + } + + /** Visit here instead of tableName because we need alias */ + @Override + public T visitAtomTableItem(AtomTableItemContext ctx) { + String alias = (ctx.alias == null) ? 
"" : ctx.alias.getText(); + T result = visit(ctx.tableName()); + visitor.visitAs(alias, result); + return result; + } + + @Override + public T visitSimpleTableName(SimpleTableNameContext ctx) { + return visitor.visitIndexName(ctx.getText()); + } + + @Override + public T visitTableNamePattern(TableNamePatternContext ctx) { + return visitor.visitIndexName(ctx.getText()); + } + + @Override + public T visitTableAndTypeName(TableAndTypeNameContext ctx) { + return visitor.visitIndexName(ctx.uid(0).getText()); + } + + @Override + public T visitFullColumnName(FullColumnNameContext ctx) { + return visitor.visitFieldName(ctx.getText()); + } + + @Override + public T visitUdfFunctionCall(UdfFunctionCallContext ctx) { + String funcName = ctx.fullId().getText(); + T func = visitor.visitFunctionName(funcName); + return reduce(func, ctx.functionArgs()); + } + + @Override + public T visitScalarFunctionCall(ScalarFunctionCallContext ctx) { + UnsupportedSemanticVerifier.verify(ctx); + T func = visit(ctx.scalarFunctionName()); + return reduce(func, ctx.functionArgs()); + } + + @Override + public T visitMathOperator(MathOperatorContext ctx) { + UnsupportedSemanticVerifier.verify(ctx); + return super.visitMathOperator(ctx); + } + + @Override + public T visitRegexpPredicate(RegexpPredicateContext ctx) { + UnsupportedSemanticVerifier.verify(ctx); + return super.visitRegexpPredicate(ctx); + } + + @Override + public T visitSelectElements(SelectElementsContext ctx) { + return visitor.visitSelect( + ctx.selectElement().stream().map(this::visit).collect(Collectors.toList())); + } + + @Override + public T visitSelectStarElement(OpenSearchLegacySqlParser.SelectStarElementContext ctx) { + return visitor.visitSelectAllColumn(); + } + + @Override + public T visitSelectColumnElement(SelectColumnElementContext ctx) { + return visitSelectItem(ctx.fullColumnName(), ctx.uid()); + } + + @Override + public T visitSelectFunctionElement(SelectFunctionElementContext ctx) { + return 
visitSelectItem(ctx.functionCall(), ctx.uid()); + } + + @Override + public T visitSelectExpressionElement(SelectExpressionElementContext ctx) { + return visitSelectItem(ctx.expression(), ctx.uid()); + } + + @Override + public T visitAggregateWindowedFunction(AggregateWindowedFunctionContext ctx) { + String funcName = ctx.getChild(0).getText(); + T func = visitor.visitFunctionName(funcName); + return reduce(func, ctx.functionArg()); + } + + @Override + public T visitFunctionNameBase(FunctionNameBaseContext ctx) { + return visitor.visitFunctionName(ctx.getText()); + } + + @Override + public T visitBinaryComparisonPredicate(BinaryComparisonPredicateContext ctx) { + if (isNamedArgument(ctx)) { // Essentially named argument is assign instead of comparison + return defaultResult(); + } + + T op = visit(ctx.comparisonOperator()); + return reduce(op, Arrays.asList(ctx.left, ctx.right)); + } + + @Override + public T visitIsExpression(IsExpressionContext ctx) { + T op = visitor.visitOperator("IS"); + return op.reduce( + Arrays.asList(visit(ctx.predicate()), visitor.visitBoolean(ctx.testValue.getText()))); + } + + @Override + public T visitConvertedDataType(OpenSearchLegacySqlParser.ConvertedDataTypeContext ctx) { + if (ctx.getChild(0) != null && !Strings.isNullOrEmpty(ctx.getChild(0).getText())) { + return visitor.visitConvertedType(ctx.getChild(0).getText()); + } else { + return super.visitConvertedDataType(ctx); + } + } + + @Override + public T visitComparisonOperator(ComparisonOperatorContext ctx) { + return visitor.visitOperator(ctx.getText()); + } + + @Override + public T visitConstant(ConstantContext ctx) { + if (ctx.REAL_LITERAL() != null) { + return visitor.visitFloat(ctx.getText()); + } + if (ctx.dateType != null) { + return visitor.visitDate(ctx.getText()); + } + if (ctx.nullLiteral != null) { + return visitor.visitNull(); + } + return super.visitConstant(ctx); + } + + @Override + public T visitStringLiteral(StringLiteralContext ctx) { + return 
visitor.visitString(ctx.getText()); + } + + @Override + public T visitDecimalLiteral(DecimalLiteralContext ctx) { + return visitor.visitInteger(ctx.getText()); + } + + @Override + public T visitBooleanLiteral(BooleanLiteralContext ctx) { + return visitor.visitBoolean(ctx.getText()); + } + + @Override + protected T defaultResult() { + return visitor.defaultValue(); + } + + @Override + protected T aggregateResult(T aggregate, T nextResult) { + if (nextResult != defaultResult()) { // Simply return non-default value for now + return nextResult; + } + return aggregate; + } + + /** + * Named argument, ex. TOPHITS('size'=3), is under FunctionArgs -> Predicate And the function name + * should be contained in openSearchFunctionNameBase + */ + private boolean isNamedArgument(BinaryComparisonPredicateContext ctx) { + if (ctx.getParent() != null + && ctx.getParent().getParent() != null + && ctx.getParent().getParent().getParent() != null + && ctx.getParent().getParent().getParent() instanceof ScalarFunctionCallContext) { + + ScalarFunctionCallContext parent = + (ScalarFunctionCallContext) ctx.getParent().getParent().getParent(); + return parent.scalarFunctionName().functionNameBase().openSearchFunctionNameBase() != null; + } + return false; + } + + /** Enforce visiting result of table instead of ON clause as result */ + private T visitJoin(List children, TableSourceItemContext tableCtx) { + T result = defaultResult(); + for (ParseTree child : children) { + if (child == tableCtx) { + result = visit(tableCtx); + } else { + visit(child); + } + } + return result; + } + + /** Visit select items for type check and alias definition */ + private T visitSelectItem(ParserRuleContext item, UidContext uid) { + T result = visit(item); + if (uid != null) { + visitor.visitAs(uid.getText(), result); + } + return result; + } + + private T reduce(T reducer, ParserRuleContext ctx) { + return reduce(reducer, (ctx == null) ? 
emptyList() : ctx.children); + } + + /** Make constructor apply arguments and return result type */ + private T reduce(T reducer, List nodes) { + List args; + if (nodes == null) { + args = emptyList(); + } else { + args = + nodes.stream() + .map(this::visit) + .filter(type -> type != defaultResult()) + .collect(Collectors.toList()); + } + return reducer.reduce(args); + } + + /** Combine an item and a list of items to a single list */ + private List asList( + Node1 first, List rest) { + + List result = new ArrayList<>(singleton(first)); + result.addAll(rest); + return result; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java index b0bd01a093..cf583aab40 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; -/** - * Exit visitor early due to some reason. - */ +/** Exit visitor early due to some reason. 
*/ public class EarlyExitAnalysisException extends RuntimeException { - public EarlyExitAnalysisException(String message) { - super(message); - } + public EarlyExitAnalysisException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java index d3985259dd..8cc83a5fe2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java @@ -3,19 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; - public interface Cursor { - NullCursor NULL_CURSOR = new NullCursor(); + NullCursor NULL_CURSOR = new NullCursor(); /** - * All cursor's are of the form : + * All cursor's are of the form :
* The serialized form before encoding is upto Cursor implementation */ String generateCursorId(); - CursorType getType(); + CursorType getType(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java index 7c96cb8835..fea47e7e39 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java @@ -3,42 +3,41 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; import java.util.HashMap; import java.util.Map; /** - * Different types queries for which cursor is supported. - * The result execution, and cursor genreation/parsing will depend on the cursor type. - * NullCursor is the placeholder implementation in case of non-cursor query. + * Different types queries for which cursor is supported. The result execution, and cursor + * generation/parsing will depend on the cursor type. NullCursor is the placeholder implementation + * in case of non-cursor query. 
*/ public enum CursorType { - NULL(null), - DEFAULT("d"), - AGGREGATION("a"), - JOIN("j"); + NULL(null), + DEFAULT("d"), + AGGREGATION("a"), + JOIN("j"); - public String id; + public String id; - CursorType(String id) { - this.id = id; - } + CursorType(String id) { + this.id = id; + } - public String getId() { - return this.id; - } + public String getId() { + return this.id; + } - public static final Map LOOKUP = new HashMap<>(); + public static final Map LOOKUP = new HashMap<>(); - static { - for (CursorType type : CursorType.values()) { - LOOKUP.put(type.getId(), type); - } + static { + for (CursorType type : CursorType.values()) { + LOOKUP.put(type.getId(), type); } + } - public static CursorType getById(String id) { - return LOOKUP.getOrDefault(id, NULL); - } + public static CursorType getById(String id) { + return LOOKUP.getOrDefault(id, NULL); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java index 856c1e5e2b..c5be0066fc 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; import com.google.common.base.Strings; @@ -21,9 +20,8 @@ import org.json.JSONObject; import org.opensearch.sql.legacy.executor.format.Schema; - /** - * Minimum metdata that will be serialized for generating cursorId for + * Minimum metdata that will be serialized for generating cursorId for
* SELECT .... FROM .. ORDER BY .... queries */ @Getter @@ -31,130 +29,135 @@ @NoArgsConstructor public class DefaultCursor implements Cursor { - /** Make sure all keys are unique to prevent overriding - * and as small as possible to make cursor compact - */ - private static final String FETCH_SIZE = "f"; - private static final String ROWS_LEFT = "l"; - private static final String INDEX_PATTERN = "i"; - private static final String SCROLL_ID = "s"; - private static final String SCHEMA_COLUMNS = "c"; - private static final String FIELD_ALIAS_MAP = "a"; - - /** To get mappings for index to check if type is date needed for - * @see org.opensearch.sql.legacy.executor.format.DateFieldFormatter */ - @NonNull - private String indexPattern; - - /** List of Schema.Column for maintaining field order and generating null values of missing fields */ - @NonNull - private List columns; - - /** To delegate to correct cursor handler to get next page*/ - private final CursorType type = CursorType.DEFAULT; - + /** + * Make sure all keys are unique to prevent overriding and as small as possible to make cursor + * compact + */ + private static final String FETCH_SIZE = "f"; + + private static final String ROWS_LEFT = "l"; + private static final String INDEX_PATTERN = "i"; + private static final String SCROLL_ID = "s"; + private static final String SCHEMA_COLUMNS = "c"; + private static final String FIELD_ALIAS_MAP = "a"; + + /** + * To get mappings for index to check if type is date needed for + * + * @see org.opensearch.sql.legacy.executor.format.DateFieldFormatter + */ + @NonNull private String indexPattern; + + /** + * List of Schema.Column for maintaining field order and generating null values of missing fields + */ + @NonNull private List columns; + + /** To delegate to correct cursor handler to get next page */ + private final CursorType type = CursorType.DEFAULT; + + /** + * Truncate the @see DataRows to respect LIMIT clause and/or to identify last page to close scroll + * 
context. docsLeft is decremented by fetch_size for call to get page of result. + */ + private long rowsLeft; + + /** + * @see org.opensearch.sql.legacy.executor.format.SelectResultSet + */ + @NonNull private Map fieldAliasMap; + + /** To get next batch of result */ + private String scrollId; + + /** To reduce the number of rows left by fetchSize */ + @NonNull private Integer fetchSize; + + private Integer limit; + + @Override + public CursorType getType() { + return type; + } + + @Override + public String generateCursorId() { + if (rowsLeft <= 0 || Strings.isNullOrEmpty(scrollId)) { + return null; + } + JSONObject json = new JSONObject(); + json.put(FETCH_SIZE, fetchSize); + json.put(ROWS_LEFT, rowsLeft); + json.put(INDEX_PATTERN, indexPattern); + json.put(SCROLL_ID, scrollId); + json.put(SCHEMA_COLUMNS, getSchemaAsJson()); + json.put(FIELD_ALIAS_MAP, fieldAliasMap); + return String.format("%s:%s", type.getId(), encodeCursor(json)); + } + + public static DefaultCursor from(String cursorId) { /** - * Truncate the @see DataRows to respect LIMIT clause and/or to identify last page to close scroll context. - * docsLeft is decremented by fetch_size for call to get page of result. 
+ * It is assumed that cursorId here is the second part of the original cursor passed by the + * client after removing first part which identifies cursor type */ - private long rowsLeft; - - /** @see org.opensearch.sql.legacy.executor.format.SelectResultSet */ - @NonNull - private Map fieldAliasMap; - - /** To get next batch of result */ - private String scrollId; - - /** To reduce the number of rows left by fetchSize */ - @NonNull - private Integer fetchSize; - - private Integer limit; - - @Override - public CursorType getType() { - return type; - } - - @Override - public String generateCursorId() { - if (rowsLeft <=0 || Strings.isNullOrEmpty(scrollId)) { - return null; - } - JSONObject json = new JSONObject(); - json.put(FETCH_SIZE, fetchSize); - json.put(ROWS_LEFT, rowsLeft); - json.put(INDEX_PATTERN, indexPattern); - json.put(SCROLL_ID, scrollId); - json.put(SCHEMA_COLUMNS, getSchemaAsJson()); - json.put(FIELD_ALIAS_MAP, fieldAliasMap); - return String.format("%s:%s", type.getId(), encodeCursor(json)); - } - - public static DefaultCursor from(String cursorId) { - /** - * It is assumed that cursorId here is the second part of the original cursor passed - * by the client after removing first part which identifies cursor type - */ - JSONObject json = decodeCursor(cursorId); - DefaultCursor cursor = new DefaultCursor(); - cursor.setFetchSize(json.getInt(FETCH_SIZE)); - cursor.setRowsLeft(json.getLong(ROWS_LEFT)); - cursor.setIndexPattern(json.getString(INDEX_PATTERN)); - cursor.setScrollId(json.getString(SCROLL_ID)); - cursor.setColumns(getColumnsFromSchema(json.getJSONArray(SCHEMA_COLUMNS))); - cursor.setFieldAliasMap(fieldAliasMap(json.getJSONObject(FIELD_ALIAS_MAP))); - - return cursor; - } - - private JSONArray getSchemaAsJson() { - JSONArray schemaJson = new JSONArray(); - - for (Schema.Column column : columns) { - schemaJson.put(schemaEntry(column.getName(), column.getAlias(), column.getType())); - } - - return schemaJson; + JSONObject json = 
decodeCursor(cursorId); + DefaultCursor cursor = new DefaultCursor(); + cursor.setFetchSize(json.getInt(FETCH_SIZE)); + cursor.setRowsLeft(json.getLong(ROWS_LEFT)); + cursor.setIndexPattern(json.getString(INDEX_PATTERN)); + cursor.setScrollId(json.getString(SCROLL_ID)); + cursor.setColumns(getColumnsFromSchema(json.getJSONArray(SCHEMA_COLUMNS))); + cursor.setFieldAliasMap(fieldAliasMap(json.getJSONObject(FIELD_ALIAS_MAP))); + + return cursor; + } + + private JSONArray getSchemaAsJson() { + JSONArray schemaJson = new JSONArray(); + + for (Schema.Column column : columns) { + schemaJson.put(schemaEntry(column.getName(), column.getAlias(), column.getType())); } - private JSONObject schemaEntry(String name, String alias, String type) { - JSONObject entry = new JSONObject(); - entry.put("name", name); - if (alias != null) { - entry.put("alias", alias); - } - entry.put("type", type); - return entry; - } - - private static String encodeCursor(JSONObject cursorJson) { - return Base64.getEncoder().encodeToString(cursorJson.toString().getBytes()); - } - - private static JSONObject decodeCursor(String cursorId) { - return new JSONObject(new String(Base64.getDecoder().decode(cursorId))); - } - - private static Map fieldAliasMap(JSONObject json) { - Map fieldToAliasMap = new HashMap<>(); - json.keySet().forEach(key -> fieldToAliasMap.put(key, json.get(key).toString())); - return fieldToAliasMap; - } + return schemaJson; + } - private static List getColumnsFromSchema(JSONArray schema) { - List columns = IntStream. - range(0, schema.length()). 
- mapToObj(i -> { - JSONObject jsonColumn = schema.getJSONObject(i); - return new Schema.Column( - jsonColumn.getString("name"), - jsonColumn.optString("alias", null), - Schema.Type.valueOf(jsonColumn.getString("type").toUpperCase()) - ); - } - ).collect(Collectors.toList()); - return columns; + private JSONObject schemaEntry(String name, String alias, String type) { + JSONObject entry = new JSONObject(); + entry.put("name", name); + if (alias != null) { + entry.put("alias", alias); } + entry.put("type", type); + return entry; + } + + private static String encodeCursor(JSONObject cursorJson) { + return Base64.getEncoder().encodeToString(cursorJson.toString().getBytes()); + } + + private static JSONObject decodeCursor(String cursorId) { + return new JSONObject(new String(Base64.getDecoder().decode(cursorId))); + } + + private static Map fieldAliasMap(JSONObject json) { + Map fieldToAliasMap = new HashMap<>(); + json.keySet().forEach(key -> fieldToAliasMap.put(key, json.get(key).toString())); + return fieldToAliasMap; + } + + private static List getColumnsFromSchema(JSONArray schema) { + List columns = + IntStream.range(0, schema.length()) + .mapToObj( + i -> { + JSONObject jsonColumn = schema.getJSONObject(i); + return new Schema.Column( + jsonColumn.getString("name"), + jsonColumn.optString("alias", null), + Schema.Type.valueOf(jsonColumn.getString("type").toUpperCase())); + }) + .collect(Collectors.toList()); + return columns; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java index 3b2691186b..b7d90b66da 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.google.common.collect.ImmutableList; @@ -17,66 +16,64 
@@ import org.opensearch.sql.legacy.antlr.semantic.types.special.Product; import org.opensearch.sql.legacy.executor.format.Schema; -/** - * The definition of column type provider - */ +/** The definition of column type provider */ public class ColumnTypeProvider { - private final List typeList; + private final List typeList; - private static final Map TYPE_MAP = - new ImmutableMap.Builder() - .put(OpenSearchDataType.SHORT, Schema.Type.SHORT) - .put(OpenSearchDataType.LONG, Schema.Type.LONG) - .put(OpenSearchDataType.INTEGER, Schema.Type.INTEGER) - .put(OpenSearchDataType.FLOAT, Schema.Type.FLOAT) - .put(OpenSearchDataType.DOUBLE, Schema.Type.DOUBLE) - .put(OpenSearchDataType.KEYWORD, Schema.Type.KEYWORD) - .put(OpenSearchDataType.TEXT, Schema.Type.TEXT) - .put(OpenSearchDataType.STRING, Schema.Type.TEXT) - .put(OpenSearchDataType.DATE, Schema.Type.DATE) - .put(OpenSearchDataType.BOOLEAN, Schema.Type.BOOLEAN) - .put(OpenSearchDataType.UNKNOWN, Schema.Type.DOUBLE) - .build(); - public static final Schema.Type COLUMN_DEFAULT_TYPE = Schema.Type.DOUBLE; + private static final Map TYPE_MAP = + new ImmutableMap.Builder() + .put(OpenSearchDataType.SHORT, Schema.Type.SHORT) + .put(OpenSearchDataType.LONG, Schema.Type.LONG) + .put(OpenSearchDataType.INTEGER, Schema.Type.INTEGER) + .put(OpenSearchDataType.FLOAT, Schema.Type.FLOAT) + .put(OpenSearchDataType.DOUBLE, Schema.Type.DOUBLE) + .put(OpenSearchDataType.KEYWORD, Schema.Type.KEYWORD) + .put(OpenSearchDataType.TEXT, Schema.Type.TEXT) + .put(OpenSearchDataType.STRING, Schema.Type.TEXT) + .put(OpenSearchDataType.DATE, Schema.Type.DATE) + .put(OpenSearchDataType.BOOLEAN, Schema.Type.BOOLEAN) + .put(OpenSearchDataType.UNKNOWN, Schema.Type.DOUBLE) + .build(); + public static final Schema.Type COLUMN_DEFAULT_TYPE = Schema.Type.DOUBLE; - public ColumnTypeProvider(Type type) { - this.typeList = convertOutputColumnType(type); - } + public ColumnTypeProvider(Type type) { + this.typeList = convertOutputColumnType(type); + } - public 
ColumnTypeProvider() { - this.typeList = new ArrayList<>(); - } + public ColumnTypeProvider() { + this.typeList = new ArrayList<>(); + } - /** - * Get the type of column by index. - * - * @param index column index. - * @return column type. - */ - public Schema.Type get(int index) { - if (typeList.isEmpty()) { - return COLUMN_DEFAULT_TYPE; - } else { - return typeList.get(index); - } + /** + * Get the type of column by index. + * + * @param index column index. + * @return column type. + */ + public Schema.Type get(int index) { + if (typeList.isEmpty()) { + return COLUMN_DEFAULT_TYPE; + } else { + return typeList.get(index); } + } - private List convertOutputColumnType(Type type) { - if (type instanceof Product) { - List types = ((Product) type).getTypes(); - return types.stream().map(t -> convertType(t)).collect(Collectors.toList()); - } else if (type instanceof OpenSearchDataType) { - return ImmutableList.of(convertType(type)); - } else { - return ImmutableList.of(COLUMN_DEFAULT_TYPE); - } + private List convertOutputColumnType(Type type) { + if (type instanceof Product) { + List types = ((Product) type).getTypes(); + return types.stream().map(t -> convertType(t)).collect(Collectors.toList()); + } else if (type instanceof OpenSearchDataType) { + return ImmutableList.of(convertType(type)); + } else { + return ImmutableList.of(COLUMN_DEFAULT_TYPE); } + } - private Schema.Type convertType(Type type) { - try { - return TYPE_MAP.getOrDefault(type, COLUMN_DEFAULT_TYPE); - } catch (Exception e) { - return COLUMN_DEFAULT_TYPE; - } + private Schema.Type convertType(Type type) { + try { + return TYPE_MAP.getOrDefault(type, COLUMN_DEFAULT_TYPE); + } catch (Exception e) { + return COLUMN_DEFAULT_TYPE; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java index ff6b016ddb..8804c543f6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.SQLExpr; @@ -18,363 +17,368 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * - * * @author ansj */ public class Condition extends Where { - public enum OPERATOR { - - EQ, - GT, - LT, - GTE, - LTE, - N, - LIKE, - NLIKE, - REGEXP, - IS, - ISN, - IN, - NIN, - BETWEEN, - NBETWEEN, - GEO_INTERSECTS, - GEO_BOUNDING_BOX, - GEO_DISTANCE, - GEO_POLYGON, - IN_TERMS, - TERM, - IDS_QUERY, - NESTED_COMPLEX, - NOT_EXISTS_NESTED_COMPLEX, - CHILDREN_COMPLEX, - SCRIPT, - NIN_TERMS, - NTERM, - NREGEXP; - - public static Map methodNameToOpear; - - public static Map operStringToOpear; - - public static Map simpleOperStringToOpear; - - private static BiMap negatives; - - private static BiMap simpleReverses; - - static { - methodNameToOpear = new HashMap<>(); - methodNameToOpear.put("term", TERM); - methodNameToOpear.put("matchterm", TERM); - methodNameToOpear.put("match_term", TERM); - methodNameToOpear.put("terms", IN_TERMS); - methodNameToOpear.put("in_terms", IN_TERMS); - methodNameToOpear.put("ids", IDS_QUERY); - methodNameToOpear.put("ids_query", IDS_QUERY); - methodNameToOpear.put("regexp", REGEXP); - methodNameToOpear.put("regexp_query", REGEXP); - } - - static { - operStringToOpear = new HashMap<>(); - operStringToOpear.put("=", EQ); - operStringToOpear.put(">", GT); - operStringToOpear.put("<", LT); - operStringToOpear.put(">=", GTE); - operStringToOpear.put("<=", LTE); - operStringToOpear.put("<>", N); - operStringToOpear.put("LIKE", LIKE); - operStringToOpear.put("NOT", N); - operStringToOpear.put("NOT LIKE", NLIKE); - operStringToOpear.put("IS", IS); - operStringToOpear.put("IS NOT", ISN); - operStringToOpear.put("IN", IN); - operStringToOpear.put("NOT IN", NIN); - operStringToOpear.put("BETWEEN", BETWEEN); - operStringToOpear.put("NOT BETWEEN", NBETWEEN); - 
operStringToOpear.put("GEO_INTERSECTS", GEO_INTERSECTS); - operStringToOpear.put("GEO_BOUNDING_BOX", GEO_BOUNDING_BOX); - operStringToOpear.put("GEO_DISTANCE", GEO_DISTANCE); - operStringToOpear.put("GEO_POLYGON", GEO_POLYGON); - operStringToOpear.put("NESTED", NESTED_COMPLEX); - operStringToOpear.put("CHILDREN", CHILDREN_COMPLEX); - operStringToOpear.put("SCRIPT", SCRIPT); - } - - static { - simpleOperStringToOpear = new HashMap<>(); - simpleOperStringToOpear.put("=", EQ); - simpleOperStringToOpear.put(">", GT); - simpleOperStringToOpear.put("<", LT); - simpleOperStringToOpear.put(">=", GTE); - simpleOperStringToOpear.put("<=", LTE); - simpleOperStringToOpear.put("<>", N); - } - - static { - negatives = HashBiMap.create(7); - negatives.put(EQ, N); - negatives.put(IN_TERMS, NIN_TERMS); - negatives.put(TERM, NTERM); - negatives.put(GT, LTE); - negatives.put(LT, GTE); - negatives.put(LIKE, NLIKE); - negatives.put(IS, ISN); - negatives.put(IN, NIN); - negatives.put(BETWEEN, NBETWEEN); - negatives.put(NESTED_COMPLEX, NOT_EXISTS_NESTED_COMPLEX); - negatives.put(REGEXP, NREGEXP); - } - - static { - simpleReverses = HashBiMap.create(4); - simpleReverses.put(EQ, EQ); - simpleReverses.put(GT, LT); - simpleReverses.put(GTE, LTE); - simpleReverses.put(N, N); - } - - public OPERATOR negative() throws SqlParseException { - OPERATOR negative = negatives.get(this); - negative = negative != null ? negative : negatives.inverse().get(this); - if (negative == null) { - throw new SqlParseException(StringUtils.format("Negative operator [%s] is not supported.", - this.name())); - } - return negative; - } - - public OPERATOR simpleReverse() throws SqlParseException { - OPERATOR reverse = simpleReverses.get(this); - reverse = reverse != null ? 
reverse : simpleReverses.inverse().get(this); - if (reverse == null) { - throw new SqlParseException(StringUtils.format("Simple reverse operator [%s] is not supported.", - this.name())); - } - return reverse; - } - - public Boolean isSimpleOperator() { - return simpleOperStringToOpear.containsValue(this); - } + public enum OPERATOR { + EQ, + GT, + LT, + GTE, + LTE, + N, + LIKE, + NLIKE, + REGEXP, + IS, + ISN, + IN, + NIN, + BETWEEN, + NBETWEEN, + GEO_INTERSECTS, + GEO_BOUNDING_BOX, + GEO_DISTANCE, + GEO_POLYGON, + IN_TERMS, + TERM, + IDS_QUERY, + NESTED_COMPLEX, + NOT_EXISTS_NESTED_COMPLEX, + CHILDREN_COMPLEX, + SCRIPT, + NIN_TERMS, + NTERM, + NREGEXP; + + public static Map methodNameToOpear; + + public static Map operStringToOpear; + + public static Map simpleOperStringToOpear; + + private static BiMap negatives; + + private static BiMap simpleReverses; + + static { + methodNameToOpear = new HashMap<>(); + methodNameToOpear.put("term", TERM); + methodNameToOpear.put("matchterm", TERM); + methodNameToOpear.put("match_term", TERM); + methodNameToOpear.put("terms", IN_TERMS); + methodNameToOpear.put("in_terms", IN_TERMS); + methodNameToOpear.put("ids", IDS_QUERY); + methodNameToOpear.put("ids_query", IDS_QUERY); + methodNameToOpear.put("regexp", REGEXP); + methodNameToOpear.put("regexp_query", REGEXP); } - private String name; - - private SQLExpr nameExpr; - - private Object value; - - public SQLExpr getNameExpr() { - return nameExpr; + static { + operStringToOpear = new HashMap<>(); + operStringToOpear.put("=", EQ); + operStringToOpear.put(">", GT); + operStringToOpear.put("<", LT); + operStringToOpear.put(">=", GTE); + operStringToOpear.put("<=", LTE); + operStringToOpear.put("<>", N); + operStringToOpear.put("LIKE", LIKE); + operStringToOpear.put("NOT", N); + operStringToOpear.put("NOT LIKE", NLIKE); + operStringToOpear.put("IS", IS); + operStringToOpear.put("IS NOT", ISN); + operStringToOpear.put("IN", IN); + operStringToOpear.put("NOT IN", NIN); + 
operStringToOpear.put("BETWEEN", BETWEEN); + operStringToOpear.put("NOT BETWEEN", NBETWEEN); + operStringToOpear.put("GEO_INTERSECTS", GEO_INTERSECTS); + operStringToOpear.put("GEO_BOUNDING_BOX", GEO_BOUNDING_BOX); + operStringToOpear.put("GEO_DISTANCE", GEO_DISTANCE); + operStringToOpear.put("GEO_POLYGON", GEO_POLYGON); + operStringToOpear.put("NESTED", NESTED_COMPLEX); + operStringToOpear.put("CHILDREN", CHILDREN_COMPLEX); + operStringToOpear.put("SCRIPT", SCRIPT); } - public SQLExpr getValueExpr() { - return valueExpr; + static { + simpleOperStringToOpear = new HashMap<>(); + simpleOperStringToOpear.put("=", EQ); + simpleOperStringToOpear.put(">", GT); + simpleOperStringToOpear.put("<", LT); + simpleOperStringToOpear.put(">=", GTE); + simpleOperStringToOpear.put("<=", LTE); + simpleOperStringToOpear.put("<>", N); } - private SQLExpr valueExpr; - - private OPERATOR OPERATOR; - - private Object relationshipType; - - private boolean isNested; - private String nestedPath; - - private boolean isChildren; - private String childType; - - public Condition(CONN conn, String field, SQLExpr nameExpr, String condition, Object obj, SQLExpr valueExpr) - throws SqlParseException { - this(conn, field, nameExpr, condition, obj, valueExpr, null); + static { + negatives = HashBiMap.create(7); + negatives.put(EQ, N); + negatives.put(IN_TERMS, NIN_TERMS); + negatives.put(TERM, NTERM); + negatives.put(GT, LTE); + negatives.put(LT, GTE); + negatives.put(LIKE, NLIKE); + negatives.put(IS, ISN); + negatives.put(IN, NIN); + negatives.put(BETWEEN, NBETWEEN); + negatives.put(NESTED_COMPLEX, NOT_EXISTS_NESTED_COMPLEX); + negatives.put(REGEXP, NREGEXP); } - public Condition(CONN conn, String field, SQLExpr nameExpr, OPERATOR condition, Object obj, SQLExpr valueExpr) - throws SqlParseException { - this(conn, field, nameExpr, condition, obj, valueExpr, null); + static { + simpleReverses = HashBiMap.create(4); + simpleReverses.put(EQ, EQ); + simpleReverses.put(GT, LT); + simpleReverses.put(GTE, 
LTE); + simpleReverses.put(N, N); } - public Condition(CONN conn, String name, SQLExpr nameExpr, String oper, - Object value, SQLExpr valueExpr, Object relationshipType) throws SqlParseException { - super(conn); - - this.OPERATOR = null; - this.name = name; - this.value = value; - this.nameExpr = nameExpr; - this.valueExpr = valueExpr; - - this.relationshipType = relationshipType; - - if (this.relationshipType != null) { - if (this.relationshipType instanceof NestedType) { - NestedType nestedType = (NestedType) relationshipType; - - this.isNested = true; - this.nestedPath = nestedType.path; - this.isChildren = false; - this.childType = ""; - } else if (relationshipType instanceof ChildrenType) { - ChildrenType childrenType = (ChildrenType) relationshipType; - - this.isNested = false; - this.nestedPath = ""; - this.isChildren = true; - this.childType = childrenType.childType; - } - } else { - this.isNested = false; - this.nestedPath = ""; - this.isChildren = false; - this.childType = ""; - } - - if (OPERATOR.operStringToOpear.containsKey(oper)) { - this.OPERATOR = OPERATOR.operStringToOpear.get(oper); - } else { - throw new SqlParseException("Unsupported operation: " + oper); - } + public OPERATOR negative() throws SqlParseException { + OPERATOR negative = negatives.get(this); + negative = negative != null ? 
negative : negatives.inverse().get(this); + if (negative == null) { + throw new SqlParseException( + StringUtils.format("Negative operator [%s] is not supported.", this.name())); + } + return negative; } - - public Condition(CONN conn, - String name, - SQLExpr nameExpr, - OPERATOR oper, - Object value, - SQLExpr valueExpr, - Object relationshipType - ) throws SqlParseException { - super(conn); - - this.OPERATOR = null; - this.nameExpr = nameExpr; - this.valueExpr = valueExpr; - this.name = name; - this.value = value; - this.OPERATOR = oper; - this.relationshipType = relationshipType; - - if (this.relationshipType != null) { - if (this.relationshipType instanceof NestedType) { - NestedType nestedType = (NestedType) relationshipType; - - this.isNested = true; - this.nestedPath = nestedType.path; - this.isChildren = false; - this.childType = ""; - } else if (relationshipType instanceof ChildrenType) { - ChildrenType childrenType = (ChildrenType) relationshipType; - - this.isNested = false; - this.nestedPath = ""; - this.isChildren = true; - this.childType = childrenType.childType; - } - } else { - this.isNested = false; - this.nestedPath = ""; - this.isChildren = false; - this.childType = ""; - } + public OPERATOR simpleReverse() throws SqlParseException { + OPERATOR reverse = simpleReverses.get(this); + reverse = reverse != null ? 
reverse : simpleReverses.inverse().get(this); + if (reverse == null) { + throw new SqlParseException( + StringUtils.format("Simple reverse operator [%s] is not supported.", this.name())); + } + return reverse; } - public String getOpertatorSymbol() throws SqlParseException { - switch (OPERATOR) { - case EQ: - return "=="; - case GT: - return ">"; - case LT: - return "<"; - case GTE: - return ">="; - case LTE: - return "<="; - case N: - return "<>"; - case IS: - return "=="; - - case ISN: - return "!="; - default: - throw new SqlParseException(StringUtils.format("Failed to parse operator [%s]", OPERATOR)); - } + public Boolean isSimpleOperator() { + return simpleOperStringToOpear.containsValue(this); } - - - public String getName() { - return name; + } + + private String name; + + private SQLExpr nameExpr; + + private Object value; + + public SQLExpr getNameExpr() { + return nameExpr; + } + + public SQLExpr getValueExpr() { + return valueExpr; + } + + private SQLExpr valueExpr; + + private OPERATOR OPERATOR; + + private Object relationshipType; + + private boolean isNested; + private String nestedPath; + + private boolean isChildren; + private String childType; + + public Condition( + CONN conn, String field, SQLExpr nameExpr, String condition, Object obj, SQLExpr valueExpr) + throws SqlParseException { + this(conn, field, nameExpr, condition, obj, valueExpr, null); + } + + public Condition( + CONN conn, String field, SQLExpr nameExpr, OPERATOR condition, Object obj, SQLExpr valueExpr) + throws SqlParseException { + this(conn, field, nameExpr, condition, obj, valueExpr, null); + } + + public Condition( + CONN conn, + String name, + SQLExpr nameExpr, + String oper, + Object value, + SQLExpr valueExpr, + Object relationshipType) + throws SqlParseException { + super(conn); + + this.OPERATOR = null; + this.name = name; + this.value = value; + this.nameExpr = nameExpr; + this.valueExpr = valueExpr; + + this.relationshipType = relationshipType; + + if 
(this.relationshipType != null) { + if (this.relationshipType instanceof NestedType) { + NestedType nestedType = (NestedType) relationshipType; + + this.isNested = true; + this.nestedPath = nestedType.path; + this.isChildren = false; + this.childType = ""; + } else if (relationshipType instanceof ChildrenType) { + ChildrenType childrenType = (ChildrenType) relationshipType; + + this.isNested = false; + this.nestedPath = ""; + this.isChildren = true; + this.childType = childrenType.childType; + } + } else { + this.isNested = false; + this.nestedPath = ""; + this.isChildren = false; + this.childType = ""; } - public void setName(String name) { - this.name = name; + if (OPERATOR.operStringToOpear.containsKey(oper)) { + this.OPERATOR = OPERATOR.operStringToOpear.get(oper); + } else { + throw new SqlParseException("Unsupported operation: " + oper); } - - public Object getValue() { - return value; + } + + public Condition( + CONN conn, + String name, + SQLExpr nameExpr, + OPERATOR oper, + Object value, + SQLExpr valueExpr, + Object relationshipType) + throws SqlParseException { + super(conn); + + this.OPERATOR = null; + this.nameExpr = nameExpr; + this.valueExpr = valueExpr; + this.name = name; + this.value = value; + this.OPERATOR = oper; + this.relationshipType = relationshipType; + + if (this.relationshipType != null) { + if (this.relationshipType instanceof NestedType) { + NestedType nestedType = (NestedType) relationshipType; + + this.isNested = true; + this.nestedPath = nestedType.path; + this.isChildren = false; + this.childType = ""; + } else if (relationshipType instanceof ChildrenType) { + ChildrenType childrenType = (ChildrenType) relationshipType; + + this.isNested = false; + this.nestedPath = ""; + this.isChildren = true; + this.childType = childrenType.childType; + } + } else { + this.isNested = false; + this.nestedPath = ""; + this.isChildren = false; + this.childType = ""; } - - public void setValue(Object value) { - this.value = value; + } + + public 
String getOpertatorSymbol() throws SqlParseException { + switch (OPERATOR) { + case EQ: + return "=="; + case GT: + return ">"; + case LT: + return "<"; + case GTE: + return ">="; + case LTE: + return "<="; + case N: + return "<>"; + case IS: + return "=="; + + case ISN: + return "!="; + default: + throw new SqlParseException(StringUtils.format("Failed to parse operator [%s]", OPERATOR)); } + } - public OPERATOR getOPERATOR() { - return OPERATOR; - } + public String getName() { + return name; + } - public void setOPERATOR(OPERATOR OPERATOR) { - this.OPERATOR = OPERATOR; - } + public void setName(String name) { + this.name = name; + } - public Object getRelationshipType() { - return relationshipType; - } + public Object getValue() { + return value; + } - public void setRelationshipType(Object relationshipType) { - this.relationshipType = relationshipType; - } + public void setValue(Object value) { + this.value = value; + } - public boolean isNested() { - return isNested; - } + public OPERATOR getOPERATOR() { + return OPERATOR; + } - public void setNested(boolean isNested) { - this.isNested = isNested; - } + public void setOPERATOR(OPERATOR OPERATOR) { + this.OPERATOR = OPERATOR; + } - public String getNestedPath() { - return nestedPath; - } + public Object getRelationshipType() { + return relationshipType; + } - public void setNestedPath(String nestedPath) { - this.nestedPath = nestedPath; - } + public void setRelationshipType(Object relationshipType) { + this.relationshipType = relationshipType; + } - public boolean isChildren() { - return isChildren; - } + public boolean isNested() { + return isNested; + } - public void setChildren(boolean isChildren) { - this.isChildren = isChildren; - } + public void setNested(boolean isNested) { + this.isNested = isNested; + } - public String getChildType() { - return childType; - } + public String getNestedPath() { + return nestedPath; + } - public void setChildType(String childType) { - this.childType = childType; - } + 
public void setNestedPath(String nestedPath) { + this.nestedPath = nestedPath; + } + + public boolean isChildren() { + return isChildren; + } + + public void setChildren(boolean isChildren) { + this.isChildren = isChildren; + } + + public String getChildType() { + return childType; + } + + public void setChildType(String childType) { + this.childType = childType; + } /** - * Return true if the opear is {@link OPERATOR#NESTED_COMPLEX} + * Return true if the opear is {@link OPERATOR#NESTED_COMPLEX}
* For example, the opear is {@link OPERATOR#NESTED_COMPLEX} when condition is * nested('projects', projects.started_year > 2000 OR projects.name LIKE '%security%') */ @@ -382,40 +386,53 @@ public boolean isNestedComplex() { return OPERATOR.NESTED_COMPLEX == OPERATOR; } - @Override - public String toString() { - String result = ""; - - if (this.isNested()) { - result = "nested condition "; - if (this.getNestedPath() != null) { - result += "on path:" + this.getNestedPath() + " "; - } - } else if (this.isChildren()) { - result = "children condition "; - - if (this.getChildType() != null) { - result += "on child: " + this.getChildType() + " "; - } - } - - if (value instanceof Object[]) { - result += this.conn + " " + this.name + " " + this.OPERATOR + " " + Arrays.toString((Object[]) value); - } else { - result += this.conn + " " + this.name + " " + this.OPERATOR + " " + this.value; - } - - return result; + @Override + public String toString() { + String result = ""; + + if (this.isNested()) { + result = "nested condition "; + if (this.getNestedPath() != null) { + result += "on path:" + this.getNestedPath() + " "; + } + } else if (this.isChildren()) { + result = "children condition "; + + if (this.getChildType() != null) { + result += "on child: " + this.getChildType() + " "; + } + } + + if (value instanceof Object[]) { + result += + this.conn + + " " + + this.name + + " " + + this.OPERATOR + + " " + + Arrays.toString((Object[]) value); + } else { + result += this.conn + " " + this.name + " " + this.OPERATOR + " " + this.value; } - @Override - public Object clone() throws CloneNotSupportedException { - try { - return new Condition(this.getConn(), this.getName(), this.getNameExpr(), - this.getOPERATOR(), this.getValue(), this.getValueExpr(), this.getRelationshipType()); - } catch (SqlParseException e) { + return result; + } + + @Override + public Object clone() throws CloneNotSupportedException { + try { + return new Condition( + this.getConn(), + this.getName(), + 
this.getNameExpr(), + this.getOPERATOR(), + this.getValue(), + this.getValueExpr(), + this.getRelationshipType()); + } catch (SqlParseException e) { - } - return null; } + return null; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java index 587a8b3ef9..efa77da0a5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java @@ -3,12 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; -/** - * SQL Delete statement. - */ -public class Delete extends Query { - -} +/** SQL Delete statement. */ +public class Delete extends Query {} diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java index 996caae5e2..635d0062a5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java @@ -3,39 +3,35 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.bucketpath; import java.util.ArrayDeque; import java.util.Deque; /** - * The bucket path syntax + * The bucket path syntax
* [ , ]* [ , ] * - * https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline.html#buckets-path-syntax + *

https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline.html#buckets-path-syntax */ public class BucketPath { - private Deque pathStack = new ArrayDeque<>(); + private Deque pathStack = new ArrayDeque<>(); - public BucketPath add(Path path) { - if (pathStack.isEmpty()) { - assert path.isMetricPath() : "The last path in the bucket path must be Metric"; - } else { - assert path.isAggPath() : "All the other path in the bucket path must be Agg"; - } - pathStack.push(path); - return this; + public BucketPath add(Path path) { + if (pathStack.isEmpty()) { + assert path.isMetricPath() : "The last path in the bucket path must be Metric"; + } else { + assert path.isAggPath() : "All the other path in the bucket path must be Agg"; } + pathStack.push(path); + return this; + } - /** - * Return the bucket path. - * Return "", if there is no agg or metric available - */ - public String getBucketPath() { - String bucketPath = pathStack.isEmpty() ? "" : pathStack.pop().getPath(); - return pathStack.stream() - .map(path -> path.getSeparator() + path.getPath()) - .reduce(bucketPath, String::concat); - } + /** Return the bucket path. Return "", if there is no agg or metric available */ + public String getBucketPath() { + String bucketPath = pathStack.isEmpty() ? 
"" : pathStack.pop().getPath(); + return pathStack.stream() + .map(path -> path.getSeparator() + path.getPath()) + .reduce(bucketPath, String::concat); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java index d251585f89..4fdf6391bd 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; @@ -29,135 +28,141 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.Transports; -/** - * A RestExecutor wrapper to execute request asynchronously to avoid blocking transport thread. - */ +/** A RestExecutor wrapper to execute request asynchronously to avoid blocking transport thread. */ public class AsyncRestExecutor implements RestExecutor { - /** - * Custom thread pool name managed by OpenSearch - */ - public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; - - private static final Logger LOG = LogManager.getLogger(AsyncRestExecutor.class); - - /** - * Treat all actions as blocking which means async all actions, - * ex. 
execute() in csv executor or pretty format executor - */ - private static final Predicate ALL_ACTION_IS_BLOCKING = anyAction -> true; - - /** - * Delegated rest executor to async - */ - private final RestExecutor executor; - - /** - * Request type that expect to async to avoid blocking - */ - private final Predicate isBlocking; - - - AsyncRestExecutor(RestExecutor executor) { - this(executor, ALL_ACTION_IS_BLOCKING); + /** Custom thread pool name managed by OpenSearch */ + public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; + + private static final Logger LOG = LogManager.getLogger(AsyncRestExecutor.class); + + /** + * Treat all actions as blocking which means async all actions, ex. execute() in csv executor or + * pretty format executor + */ + private static final Predicate ALL_ACTION_IS_BLOCKING = anyAction -> true; + + /** Delegated rest executor to async */ + private final RestExecutor executor; + + /** Request type that expect to async to avoid blocking */ + private final Predicate isBlocking; + + AsyncRestExecutor(RestExecutor executor) { + this(executor, ALL_ACTION_IS_BLOCKING); + } + + AsyncRestExecutor(RestExecutor executor, Predicate isBlocking) { + this.executor = executor; + this.isBlocking = isBlocking; + } + + @Override + public void execute( + Client client, Map params, QueryAction queryAction, RestChannel channel) + throws Exception { + if (isBlockingAction(queryAction) && isRunningInTransportThread()) { + if (LOG.isDebugEnabled()) { + LOG.debug( + "[{}] Async blocking query action [{}] for executor [{}] in current thread [{}]", + QueryContext.getRequestId(), + name(executor), + name(queryAction), + Thread.currentThread().getName()); + } + async(client, params, queryAction, channel); + } else { + if (LOG.isDebugEnabled()) { + LOG.debug( + "[{}] Continue running query action [{}] for executor [{}] in current thread [{}]", + QueryContext.getRequestId(), + name(executor), + name(queryAction), + Thread.currentThread().getName()); + } 
+ doExecuteWithTimeMeasured(client, params, queryAction, channel); } - - AsyncRestExecutor(RestExecutor executor, Predicate isBlocking) { - this.executor = executor; - this.isBlocking = isBlocking; - } - - @Override - public void execute(Client client, Map params, QueryAction queryAction, RestChannel channel) - throws Exception { - if (isBlockingAction(queryAction) && isRunningInTransportThread()) { - if (LOG.isDebugEnabled()) { - LOG.debug("[{}] Async blocking query action [{}] for executor [{}] in current thread [{}]", - QueryContext.getRequestId(), name(executor), name(queryAction), Thread.currentThread().getName()); - } - async(client, params, queryAction, channel); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("[{}] Continue running query action [{}] for executor [{}] in current thread [{}]", - QueryContext.getRequestId(), name(executor), name(queryAction), Thread.currentThread().getName()); - } + } + + @Override + public String execute(Client client, Map params, QueryAction queryAction) + throws Exception { + // Result is always required and no easy way to async it here. + return executor.execute(client, params, queryAction); + } + + private boolean isBlockingAction(QueryAction queryAction) { + return isBlocking.test(queryAction); + } + + private boolean isRunningInTransportThread() { + return Transports.isTransportThread(Thread.currentThread()); + } + + /** Run given task in thread pool asynchronously */ + private void async( + Client client, Map params, QueryAction queryAction, RestChannel channel) { + + ThreadPool threadPool = client.threadPool(); + Runnable runnable = + () -> { + try { doExecuteWithTimeMeasured(client, params, queryAction, channel); - } - } - - @Override - public String execute(Client client, Map params, QueryAction queryAction) throws Exception { - // Result is always required and no easy way to async it here. 
- return executor.execute(client, params, queryAction); - } - - private boolean isBlockingAction(QueryAction queryAction) { - return isBlocking.test(queryAction); - } - - private boolean isRunningInTransportThread() { - return Transports.isTransportThread(Thread.currentThread()); - } - - /** - * Run given task in thread pool asynchronously - */ - private void async(Client client, Map params, QueryAction queryAction, RestChannel channel) { - - ThreadPool threadPool = client.threadPool(); - Runnable runnable = () -> { - try { - doExecuteWithTimeMeasured(client, params, queryAction, channel); - } catch (IOException | SqlParseException | OpenSearchException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an IO/SQL exception: {}", QueryContext.getRequestId(), - e.getMessage()); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); - } catch (IllegalStateException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got a runtime exception: {}", QueryContext.getRequestId(), - e.getMessage()); - channel.sendResponse(new BytesRestResponse(RestStatus.INSUFFICIENT_STORAGE, - "Memory circuit is broken.")); - } catch (Throwable t) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an unknown throwable: {}", QueryContext.getRequestId(), - t.getMessage()); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, - String.valueOf(t.getMessage()))); - } finally { - BackOffRetryStrategy.releaseMem(executor); - } + } catch (IOException | SqlParseException | OpenSearchException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an IO/SQL exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + 
channel.sendResponse( + new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } catch (IllegalStateException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got a runtime exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INSUFFICIENT_STORAGE, "Memory circuit is broken.")); + } catch (Throwable t) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an unknown throwable: {}", + QueryContext.getRequestId(), + t.getMessage()); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INTERNAL_SERVER_ERROR, String.valueOf(t.getMessage()))); + } finally { + BackOffRetryStrategy.releaseMem(executor); + } }; - // Preserve context of calling thread to ensure headers of requests are forwarded when running blocking actions - threadPool.schedule( - QueryContext.withCurrentContext(runnable), - new TimeValue(0L), - SQL_WORKER_THREAD_POOL_NAME - ); + // Preserve context of calling thread to ensure headers of requests are forwarded when running + // blocking actions + threadPool.schedule( + QueryContext.withCurrentContext(runnable), new TimeValue(0L), SQL_WORKER_THREAD_POOL_NAME); + } + + /** Time the real execution of Executor and log slow query for troubleshooting */ + private void doExecuteWithTimeMeasured( + Client client, Map params, QueryAction action, RestChannel channel) + throws Exception { + long startTime = System.nanoTime(); + try { + executor.execute(client, params, action, channel); + } finally { + Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); + int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); + if (elapsed.getSeconds() >= slowLogThreshold) { + LOG.warn( + "[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); 
+ } } + } - /** - * Time the real execution of Executor and log slow query for troubleshooting - */ - private void doExecuteWithTimeMeasured(Client client, - Map params, - QueryAction action, - RestChannel channel) throws Exception { - long startTime = System.nanoTime(); - try { - executor.execute(client, params, action, channel); - } finally { - Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); - int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); - if (elapsed.getSeconds() >= slowLogThreshold) { - LOG.warn("[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); - } - } - } - - private String name(Object object) { - return object.getClass().getSimpleName(); - } + private String name(Object object) { + return object.getClass().getSimpleName(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java index a5dd066536..54c4dd5abb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import com.google.common.collect.Maps; @@ -35,90 +34,94 @@ import org.opensearch.sql.legacy.query.join.JoinRequestBuilder; import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; - public class ElasticDefaultRestExecutor implements RestExecutor { - /** - * Request builder to generate OpenSearch DSL - */ - private final SqlElasticRequestBuilder requestBuilder; + /** Request builder to generate OpenSearch DSL */ + private final SqlElasticRequestBuilder requestBuilder; - private static final Logger LOG = LogManager.getLogger(ElasticDefaultRestExecutor.class); + private static final Logger LOG = 
LogManager.getLogger(ElasticDefaultRestExecutor.class); - public ElasticDefaultRestExecutor(QueryAction queryAction) { - // Put explain() here to make it run in NIO thread - try { - this.requestBuilder = queryAction.explain(); - } catch (SqlParseException e) { - throw new IllegalStateException("Failed to explain query action", e); - } + public ElasticDefaultRestExecutor(QueryAction queryAction) { + // Put explain() here to make it run in NIO thread + try { + this.requestBuilder = queryAction.explain(); + } catch (SqlParseException e) { + throw new IllegalStateException("Failed to explain query action", e); } + } - /** - * Execute the ActionRequest and returns the REST response using the channel. - */ - @Override - public void execute(Client client, Map params, QueryAction queryAction, RestChannel channel) - throws Exception { - ActionRequest request = requestBuilder.request(); + /** Execute the ActionRequest and returns the REST response using the channel. */ + @Override + public void execute( + Client client, Map params, QueryAction queryAction, RestChannel channel) + throws Exception { + ActionRequest request = requestBuilder.request(); - if (requestBuilder instanceof JoinRequestBuilder) { - ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); - executor.run(); - executor.sendResponse(channel); - } else if (requestBuilder instanceof MultiQueryRequestBuilder) { - ElasticHitsExecutor executor = MultiRequestExecutorFactory.createExecutor(client, - (MultiQueryRequestBuilder) requestBuilder); - executor.run(); - sendDefaultResponse(executor.getHits(), channel); - } else if (request instanceof SearchRequest) { - client.search((SearchRequest) request, new RestStatusToXContentListener<>(channel)); - } else if (request instanceof DeleteByQueryRequest) { - requestBuilder.getBuilder().execute( - new BulkIndexByScrollResponseContentListener(channel, Maps.newHashMap())); - } else if (request instanceof GetIndexRequest) { - 
requestBuilder.getBuilder().execute(new GetIndexRequestRestListener(channel, (GetIndexRequest) request)); - } else if (request instanceof SearchScrollRequest) { - client.searchScroll((SearchScrollRequest) request, new RestStatusToXContentListener<>(channel)); - } else { - throw new Exception(String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); - } + if (requestBuilder instanceof JoinRequestBuilder) { + ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); + executor.run(); + executor.sendResponse(channel); + } else if (requestBuilder instanceof MultiQueryRequestBuilder) { + ElasticHitsExecutor executor = + MultiRequestExecutorFactory.createExecutor( + client, (MultiQueryRequestBuilder) requestBuilder); + executor.run(); + sendDefaultResponse(executor.getHits(), channel); + } else if (request instanceof SearchRequest) { + client.search((SearchRequest) request, new RestStatusToXContentListener<>(channel)); + } else if (request instanceof DeleteByQueryRequest) { + requestBuilder + .getBuilder() + .execute(new BulkIndexByScrollResponseContentListener(channel, Maps.newHashMap())); + } else if (request instanceof GetIndexRequest) { + requestBuilder + .getBuilder() + .execute(new GetIndexRequestRestListener(channel, (GetIndexRequest) request)); + } else if (request instanceof SearchScrollRequest) { + client.searchScroll( + (SearchScrollRequest) request, new RestStatusToXContentListener<>(channel)); + } else { + throw new Exception( + String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); } + } - @Override - public String execute(Client client, Map params, QueryAction queryAction) throws Exception { - ActionRequest request = requestBuilder.request(); - - if (requestBuilder instanceof JoinRequestBuilder) { - ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); - executor.run(); - return 
ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); - } else if (requestBuilder instanceof MultiQueryRequestBuilder) { - ElasticHitsExecutor executor = MultiRequestExecutorFactory.createExecutor(client, - (MultiQueryRequestBuilder) requestBuilder); - executor.run(); - return ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); - } else if (request instanceof SearchRequest) { - ActionFuture future = client.search((SearchRequest) request); - SearchResponse response = future.actionGet(); - return response.toString(); - } else if (request instanceof DeleteByQueryRequest) { - return requestBuilder.get().toString(); - } else if (request instanceof GetIndexRequest) { - return requestBuilder.getBuilder().execute().actionGet().toString(); - } else { - throw new Exception(String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); - } + @Override + public String execute(Client client, Map params, QueryAction queryAction) + throws Exception { + ActionRequest request = requestBuilder.request(); + if (requestBuilder instanceof JoinRequestBuilder) { + ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); + executor.run(); + return ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); + } else if (requestBuilder instanceof MultiQueryRequestBuilder) { + ElasticHitsExecutor executor = + MultiRequestExecutorFactory.createExecutor( + client, (MultiQueryRequestBuilder) requestBuilder); + executor.run(); + return ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); + } else if (request instanceof SearchRequest) { + ActionFuture future = client.search((SearchRequest) request); + SearchResponse response = future.actionGet(); + return response.toString(); + } else if (request instanceof DeleteByQueryRequest) { + return requestBuilder.get().toString(); + } else if (request instanceof GetIndexRequest) { + return 
requestBuilder.getBuilder().execute().actionGet().toString(); + } else { + throw new Exception( + String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); } + } - private void sendDefaultResponse(SearchHits hits, RestChannel channel) { - try { - String json = ElasticUtils.hitsAsStringResult(hits, new MetaSearchResult()); - BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, json); - channel.sendResponse(bytesRestResponse); - } catch (IOException e) { - e.printStackTrace(); - } + private void sendDefaultResponse(SearchHits hits, RestChannel channel) { + try { + String json = ElasticUtils.hitsAsStringResult(hits, new MetaSearchResult()); + BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, json); + channel.sendResponse(bytesRestResponse); + } catch (IOException e) { + e.printStackTrace(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java index c48eb673bd..62a6d63ef7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java @@ -3,18 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; import org.opensearch.search.SearchHits; import org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Created by Eliran on 21/8/2016. - */ +/** Created by Eliran on 21/8/2016. 
*/ public interface ElasticHitsExecutor { - void run() throws IOException, SqlParseException; + void run() throws IOException, SqlParseException; - SearchHits getHits(); + SearchHits getHits(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java index ff241fce77..6f753a5e7c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java @@ -3,38 +3,34 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.util.Map; import org.opensearch.search.SearchHit; -/** - * Created by Eliran on 3/10/2015. - */ +/** Created by Eliran on 3/10/2015. */ public class ElasticResultHandler { - public static Object getFieldValue(SearchHit hit, String field) { - return deepSearchInMap(hit.getSourceAsMap(), field); - } + public static Object getFieldValue(SearchHit hit, String field) { + return deepSearchInMap(hit.getSourceAsMap(), field); + } - private static Object deepSearchInMap(Map fieldsMap, String name) { - if (name.contains(".")) { - String[] path = name.split("\\."); - Map currentObject = fieldsMap; - for (int i = 0; i < path.length - 1; i++) { - Object valueFromCurrentMap = currentObject.get(path[i]); - if (valueFromCurrentMap == null) { - return null; - } - if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { - return null; - } - currentObject = (Map) valueFromCurrentMap; - } - return currentObject.get(path[path.length - 1]); + private static Object deepSearchInMap(Map fieldsMap, String name) { + if (name.contains(".")) { + String[] path = name.split("\\."); + Map currentObject = fieldsMap; + for (int i = 0; i < path.length - 1; i++) { + Object valueFromCurrentMap = currentObject.get(path[i]); + if (valueFromCurrentMap == null) { + return null; } - - return 
fieldsMap.get(name); + if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { + return null; + } + currentObject = (Map) valueFromCurrentMap; + } + return currentObject.get(path[path.length - 1]); } + return fieldsMap.get(name); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java index 680c0c8e85..28bc559a01 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import com.google.common.collect.ImmutableSet; @@ -12,86 +11,86 @@ import java.util.Set; import java.util.stream.Collectors; -/** - * Created by Eliran on 27/12/2015. - */ +/** Created by Eliran on 27/12/2015. */ public class CSVResult { - private static final Set SENSITIVE_CHAR = ImmutableSet.of("=", "+", "-", "@"); + private static final Set SENSITIVE_CHAR = ImmutableSet.of("=", "+", "-", "@"); - private final List headers; - private final List lines; + private final List headers; + private final List lines; - /** - * Skip sanitizing if string line provided. This constructor is basically used by - * assertion in test code. - */ - public CSVResult(List headers, List lines) { - this.headers = headers; - this.lines = lines; - } + /** + * Skip sanitizing if string line provided. This constructor is basically used by assertion in + * test code. + */ + public CSVResult(List headers, List lines) { + this.headers = headers; + this.lines = lines; + } /** * Sanitize both headers and data lines by: - * 1) First prepend single quote if first char is sensitive (= - + @) - * 2) Second double quote entire cell if any comma found + *

    + *
  1. First prepend single quote if first char is sensitive (= - + @) + *
  2. Second double quote entire cell if any comma found + *
*/ public CSVResult(String separator, List headers, List> lines) { this.headers = sanitizeHeaders(separator, headers); this.lines = sanitizeLines(separator, lines); } - /** - * Return CSV header names which are sanitized because OpenSearch allows - * special character present in field name too. - * @return CSV header name list after sanitized - */ - public List getHeaders() { - return headers; - } - - /** - * Return CSV lines in which each cell is sanitized to avoid CSV injection. - * @return CSV lines after sanitized - */ - public List getLines() { - return lines; + /** + * Return CSV header names which are sanitized because OpenSearch allows special character present + * in field name too. + * + * @return CSV header name list after sanitized + */ + public List getHeaders() { + return headers; + } + + /** + * Return CSV lines in which each cell is sanitized to avoid CSV injection. + * + * @return CSV lines after sanitized + */ + public List getLines() { + return lines; + } + + private List sanitizeHeaders(String separator, List headers) { + return headers.stream() + .map(this::sanitizeCell) + .map(cell -> quoteIfRequired(separator, cell)) + .collect(Collectors.toList()); + } + + private List sanitizeLines(String separator, List> lines) { + List result = new ArrayList<>(); + for (List line : lines) { + result.add( + line.stream() + .map(this::sanitizeCell) + .map(cell -> quoteIfRequired(separator, cell)) + .collect(Collectors.joining(separator))); } + return result; + } - private List sanitizeHeaders(String separator, List headers) { - return headers.stream(). - map(this::sanitizeCell). - map(cell -> quoteIfRequired(separator, cell)). - collect(Collectors.toList()); + private String sanitizeCell(String cell) { + if (isStartWithSensitiveChar(cell)) { + return "'" + cell; } + return cell; + } - private List sanitizeLines(String separator, List> lines) { - List result = new ArrayList<>(); - for (List line : lines) { - result.add(line.stream(). 
- map(this::sanitizeCell). - map(cell -> quoteIfRequired(separator, cell)). - collect(Collectors.joining(separator))); - } - return result; - } - - private String sanitizeCell(String cell) { - if (isStartWithSensitiveChar(cell)) { - return "'" + cell; - } - return cell; - } - - private String quoteIfRequired(String separator, String cell) { - final String quote = "\""; - return cell.contains(separator) - ? quote + cell.replaceAll("\"", "\"\"") + quote : cell; - } - - private boolean isStartWithSensitiveChar(String cell) { - return SENSITIVE_CHAR.stream(). - anyMatch(cell::startsWith); - } + private String quoteIfRequired(String separator, String cell) { + final String quote = "\""; + return cell.contains(separator) ? quote + cell.replaceAll("\"", "\"\"") + quote : cell; + } + private boolean isStartWithSensitiveChar(String cell) { + return SENSITIVE_CHAR.stream().anyMatch(cell::startsWith); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java index da99652e13..a69ff31a49 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import com.google.common.base.Joiner; @@ -18,60 +17,64 @@ import org.opensearch.sql.legacy.query.QueryAction; import org.opensearch.sql.legacy.query.join.BackOffRetryStrategy; -/** - * Created by Eliran on 26/12/2015. - */ +/** Created by Eliran on 26/12/2015. 
*/ public class CSVResultRestExecutor implements RestExecutor { - @Override - public void execute(final Client client, final Map params, final QueryAction queryAction, - final RestChannel channel) throws Exception { - - final String csvString = execute(client, params, queryAction); - final BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, csvString); + @Override + public void execute( + final Client client, + final Map params, + final QueryAction queryAction, + final RestChannel channel) + throws Exception { - if (!BackOffRetryStrategy.isHealthy(2 * bytesRestResponse.content().length(), this)) { - throw new IllegalStateException( - "[CSVResultRestExecutor] Memory could be insufficient when sendResponse()."); - } + final String csvString = execute(client, params, queryAction); + final BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, csvString); - channel.sendResponse(bytesRestResponse); + if (!BackOffRetryStrategy.isHealthy(2 * bytesRestResponse.content().length(), this)) { + throw new IllegalStateException( + "[CSVResultRestExecutor] Memory could be insufficient when sendResponse()."); } - @Override - public String execute(final Client client, final Map params, final QueryAction queryAction) - throws Exception { + channel.sendResponse(bytesRestResponse); + } - final Object queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); + @Override + public String execute( + final Client client, final Map params, final QueryAction queryAction) + throws Exception { - final String separator = params.getOrDefault("separator", ","); - final String newLine = params.getOrDefault("newLine", "\n"); + final Object queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); - final boolean flat = getBooleanOrDefault(params, "flat", false); - final boolean includeScore = getBooleanOrDefault(params, "_score", false); - final boolean includeId = getBooleanOrDefault(params, "_id", false); + 
final String separator = params.getOrDefault("separator", ","); + final String newLine = params.getOrDefault("newLine", "\n"); - final List fieldNames = queryAction.getFieldNames().orElse(null); - final CSVResult result = new CSVResultsExtractor(includeScore, includeId) - .extractResults(queryResult, flat, separator, fieldNames); + final boolean flat = getBooleanOrDefault(params, "flat", false); + final boolean includeScore = getBooleanOrDefault(params, "_score", false); + final boolean includeId = getBooleanOrDefault(params, "_id", false); - return buildString(separator, result, newLine); - } + final List fieldNames = queryAction.getFieldNames().orElse(null); + final CSVResult result = + new CSVResultsExtractor(includeScore, includeId) + .extractResults(queryResult, flat, separator, fieldNames); - private boolean getBooleanOrDefault(Map params, String param, boolean defaultValue) { - boolean flat = defaultValue; - if (params.containsKey(param)) { - flat = Boolean.parseBoolean(params.get(param)); - } - return flat; - } + return buildString(separator, result, newLine); + } - private String buildString(String separator, CSVResult result, String newLine) { - StringBuilder csv = new StringBuilder(); - csv.append(Joiner.on(separator).join(result.getHeaders())); - csv.append(newLine); - csv.append(Joiner.on(newLine).join(result.getLines())); - return csv.toString(); + private boolean getBooleanOrDefault( + Map params, String param, boolean defaultValue) { + boolean flat = defaultValue; + if (params.containsKey(param)) { + flat = Boolean.parseBoolean(params.get(param)); } - + return flat; + } + + private String buildString(String separator, CSVResult result, String newLine) { + StringBuilder csv = new StringBuilder(); + csv.append(Joiner.on(separator).join(result.getHeaders())); + csv.append(newLine); + csv.append(Joiner.on(newLine).join(result.getLines())); + return csv.toString(); + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java index a22d96c133..5a3b3bc498 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import java.util.ArrayList; @@ -31,320 +30,332 @@ import org.opensearch.sql.legacy.expression.model.ExprValue; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 27/12/2015. - */ +/** Created by Eliran on 27/12/2015. */ public class CSVResultsExtractor { - private final boolean includeScore; - private final boolean includeId; - private int currentLineIndex; - - public CSVResultsExtractor(boolean includeScore, boolean includeId) { - this.includeScore = includeScore; - this.includeId = includeId; - this.currentLineIndex = 0; + private final boolean includeScore; + private final boolean includeId; + private int currentLineIndex; + + public CSVResultsExtractor(boolean includeScore, boolean includeId) { + this.includeScore = includeScore; + this.includeId = includeId; + this.currentLineIndex = 0; + } + + public CSVResult extractResults( + Object queryResult, boolean flat, String separator, final List fieldNames) + throws CsvExtractorException { + + if (queryResult instanceof SearchHits) { + SearchHit[] hits = ((SearchHits) queryResult).getHits(); + List> docsAsMap = new ArrayList<>(); + List headers = createHeadersAndFillDocsMap(flat, hits, docsAsMap, fieldNames); + List> csvLines = createCSVLinesFromDocs(flat, separator, docsAsMap, headers); + return new CSVResult(separator, headers, csvLines); } - - public CSVResult extractResults(Object queryResult, boolean flat, String separator, - final List fieldNames) throws CsvExtractorException { - - if 
(queryResult instanceof SearchHits) { - SearchHit[] hits = ((SearchHits) queryResult).getHits(); - List> docsAsMap = new ArrayList<>(); - List headers = createHeadersAndFillDocsMap(flat, hits, docsAsMap, fieldNames); - List> csvLines = createCSVLinesFromDocs(flat, separator, docsAsMap, headers); - return new CSVResult(separator, headers, csvLines); - } - if (queryResult instanceof Aggregations) { - List headers = new ArrayList<>(); - List> lines = new ArrayList<>(); - lines.add(new ArrayList()); - handleAggregations((Aggregations) queryResult, headers, lines); - return new CSVResult(separator, headers, lines); - } - // Handle List result. - if (queryResult instanceof List) { - List bindingTuples = (List) queryResult; - List> csvLines = bindingTuples.stream().map(tuple -> { - Map bindingMap = tuple.getBindingMap(); - List rowValues = new ArrayList<>(); - for (String fieldName : fieldNames) { - if (bindingMap.containsKey(fieldName)) { + if (queryResult instanceof Aggregations) { + List headers = new ArrayList<>(); + List> lines = new ArrayList<>(); + lines.add(new ArrayList()); + handleAggregations((Aggregations) queryResult, headers, lines); + return new CSVResult(separator, headers, lines); + } + // Handle List result. 
+ if (queryResult instanceof List) { + List bindingTuples = (List) queryResult; + List> csvLines = + bindingTuples.stream() + .map( + tuple -> { + Map bindingMap = tuple.getBindingMap(); + List rowValues = new ArrayList<>(); + for (String fieldName : fieldNames) { + if (bindingMap.containsKey(fieldName)) { rowValues.add(String.valueOf(bindingMap.get(fieldName).value())); - } else { + } else { rowValues.add(""); + } } - } - return rowValues; - }).collect(Collectors.toList()); + return rowValues; + }) + .collect(Collectors.toList()); - return new CSVResult(separator, fieldNames, csvLines); - } - return null; + return new CSVResult(separator, fieldNames, csvLines); } - - private void handleAggregations(Aggregations aggregations, List headers, List> lines) - throws CsvExtractorException { - if (allNumericAggregations(aggregations)) { - lines.get(this.currentLineIndex) - .addAll(fillHeaderAndCreateLineForNumericAggregations(aggregations, headers)); - return; - } - //aggregations with size one only supported when not metrics. - List aggregationList = aggregations.asList(); - if (aggregationList.size() > 1) { - throw new CsvExtractorException( - "currently support only one aggregation at same level (Except for numeric metrics)"); - } - Aggregation aggregation = aggregationList.get(0); - //we want to skip singleBucketAggregations (nested,reverse_nested,filters) - if (aggregation instanceof SingleBucketAggregation) { - Aggregations singleBucketAggs = ((SingleBucketAggregation) aggregation).getAggregations(); - handleAggregations(singleBucketAggs, headers, lines); - return; - } - if (aggregation instanceof NumericMetricsAggregation) { - handleNumericMetricAggregation(headers, lines.get(currentLineIndex), aggregation); - return; - } - if (aggregation instanceof GeoBounds) { - handleGeoBoundsAggregation(headers, lines, (GeoBounds) aggregation); - return; - } - if (aggregation instanceof TopHits) { - //todo: handle this . it returns hits... maby back to normal? 
- //todo: read about this usages - // TopHits topHitsAggregation = (TopHits) aggregation; - } - if (aggregation instanceof MultiBucketsAggregation) { - MultiBucketsAggregation bucketsAggregation = (MultiBucketsAggregation) aggregation; - String name = bucketsAggregation.getName(); - //checking because it can comes from sub aggregation again - if (!headers.contains(name)) { - headers.add(name); - } - Collection buckets = bucketsAggregation.getBuckets(); - - //clone current line. - List currentLine = lines.get(this.currentLineIndex); - List clonedLine = new ArrayList<>(currentLine); - - //call handle_Agg with current_line++ - boolean firstLine = true; - for (MultiBucketsAggregation.Bucket bucket : buckets) { - //each bucket need to add new line with current line copied => except for first line - String key = bucket.getKeyAsString(); - if (firstLine) { - firstLine = false; - } else { - currentLineIndex++; - currentLine = new ArrayList(clonedLine); - lines.add(currentLine); - } - currentLine.add(key); - handleAggregations(bucket.getAggregations(), headers, lines); - - } - } + return null; + } + + private void handleAggregations( + Aggregations aggregations, List headers, List> lines) + throws CsvExtractorException { + if (allNumericAggregations(aggregations)) { + lines + .get(this.currentLineIndex) + .addAll(fillHeaderAndCreateLineForNumericAggregations(aggregations, headers)); + return; } - - private void handleGeoBoundsAggregation(List headers, List> lines, - GeoBounds geoBoundsAggregation) { - String geoBoundAggName = geoBoundsAggregation.getName(); - headers.add(geoBoundAggName + ".topLeft.lon"); - headers.add(geoBoundAggName + ".topLeft.lat"); - headers.add(geoBoundAggName + ".bottomRight.lon"); - headers.add(geoBoundAggName + ".bottomRight.lat"); - List line = lines.get(this.currentLineIndex); - line.add(String.valueOf(geoBoundsAggregation.topLeft().getLon())); - line.add(String.valueOf(geoBoundsAggregation.topLeft().getLat())); - 
line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLon())); - line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLat())); - lines.add(line); + // aggregations with size one only supported when not metrics. + List aggregationList = aggregations.asList(); + if (aggregationList.size() > 1) { + throw new CsvExtractorException( + "currently support only one aggregation at same level (Except for numeric metrics)"); } - - private List fillHeaderAndCreateLineForNumericAggregations(Aggregations aggregations, List header) - throws CsvExtractorException { - List line = new ArrayList<>(); - List aggregationList = aggregations.asList(); - for (Aggregation aggregation : aggregationList) { - handleNumericMetricAggregation(header, line, aggregation); - } - return line; + Aggregation aggregation = aggregationList.get(0); + // we want to skip singleBucketAggregations (nested,reverse_nested,filters) + if (aggregation instanceof SingleBucketAggregation) { + Aggregations singleBucketAggs = ((SingleBucketAggregation) aggregation).getAggregations(); + handleAggregations(singleBucketAggs, headers, lines); + return; } - - private void handleNumericMetricAggregation(List header, List line, Aggregation aggregation) - throws CsvExtractorException { - final String name = aggregation.getName(); - - if (aggregation instanceof NumericMetricsAggregation.SingleValue) { - if (!header.contains(name)) { - header.add(name); - } - NumericMetricsAggregation.SingleValue agg = (NumericMetricsAggregation.SingleValue) aggregation; - line.add(!Double.isInfinite(agg.value()) ? agg.getValueAsString() : "null"); - } else if (aggregation instanceof NumericMetricsAggregation.MultiValue) { - //todo:Numeric MultiValue - Stats,ExtendedStats,Percentile... 
- if (aggregation instanceof Stats) { - String[] statsHeaders = new String[]{"count", "sum", "avg", "min", "max"}; - boolean isExtendedStats = aggregation instanceof ExtendedStats; - if (isExtendedStats) { - String[] extendedHeaders = new String[]{"sumOfSquares", "variance", "stdDeviation"}; - statsHeaders = Util.concatStringsArrays(statsHeaders, extendedHeaders); - } - mergeHeadersWithPrefix(header, name, statsHeaders); - Stats stats = (Stats) aggregation; - line.add(String.valueOf(stats.getCount())); - line.add(stats.getSumAsString()); - line.add(stats.getAvgAsString()); - line.add(stats.getMinAsString()); - line.add(stats.getMaxAsString()); - if (isExtendedStats) { - ExtendedStats extendedStats = (ExtendedStats) aggregation; - line.add(extendedStats.getSumOfSquaresAsString()); - line.add(extendedStats.getVarianceAsString()); - line.add(extendedStats.getStdDeviationAsString()); - } - } else if (aggregation instanceof Percentiles) { - - final List percentileHeaders = new ArrayList<>(7); - final Percentiles percentiles = (Percentiles) aggregation; - - for (final Percentile p : percentiles) { - percentileHeaders.add(String.valueOf(p.getPercent())); - line.add(percentiles.percentileAsString(p.getPercent())); - } - mergeHeadersWithPrefix(header, name, percentileHeaders.toArray(new String[0])); - } else { - throw new CsvExtractorException( - "unknown NumericMetricsAggregation.MultiValue:" + aggregation.getClass()); - } - + if (aggregation instanceof NumericMetricsAggregation) { + handleNumericMetricAggregation(headers, lines.get(currentLineIndex), aggregation); + return; + } + if (aggregation instanceof GeoBounds) { + handleGeoBoundsAggregation(headers, lines, (GeoBounds) aggregation); + return; + } + if (aggregation instanceof TopHits) { + // todo: handle this . it returns hits... maby back to normal? 
+ // todo: read about this usages + // TopHits topHitsAggregation = (TopHits) aggregation; + } + if (aggregation instanceof MultiBucketsAggregation) { + MultiBucketsAggregation bucketsAggregation = (MultiBucketsAggregation) aggregation; + String name = bucketsAggregation.getName(); + // checking because it can comes from sub aggregation again + if (!headers.contains(name)) { + headers.add(name); + } + Collection buckets = + bucketsAggregation.getBuckets(); + + // clone current line. + List currentLine = lines.get(this.currentLineIndex); + List clonedLine = new ArrayList<>(currentLine); + + // call handle_Agg with current_line++ + boolean firstLine = true; + for (MultiBucketsAggregation.Bucket bucket : buckets) { + // each bucket need to add new line with current line copied => except for first line + String key = bucket.getKeyAsString(); + if (firstLine) { + firstLine = false; } else { - throw new CsvExtractorException("unknown NumericMetricsAggregation" + aggregation.getClass()); + currentLineIndex++; + currentLine = new ArrayList(clonedLine); + lines.add(currentLine); } + currentLine.add(key); + handleAggregations(bucket.getAggregations(), headers, lines); + } } - - private void mergeHeadersWithPrefix(List header, String prefix, String[] newHeaders) { - for (int i = 0; i < newHeaders.length; i++) { - String newHeader = newHeaders[i]; - if (prefix != null && !prefix.equals("")) { - newHeader = prefix + "." 
+ newHeader; - } - if (!header.contains(newHeader)) { - header.add(newHeader); - } - } + } + + private void handleGeoBoundsAggregation( + List headers, List> lines, GeoBounds geoBoundsAggregation) { + String geoBoundAggName = geoBoundsAggregation.getName(); + headers.add(geoBoundAggName + ".topLeft.lon"); + headers.add(geoBoundAggName + ".topLeft.lat"); + headers.add(geoBoundAggName + ".bottomRight.lon"); + headers.add(geoBoundAggName + ".bottomRight.lat"); + List line = lines.get(this.currentLineIndex); + line.add(String.valueOf(geoBoundsAggregation.topLeft().getLon())); + line.add(String.valueOf(geoBoundsAggregation.topLeft().getLat())); + line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLon())); + line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLat())); + lines.add(line); + } + + private List fillHeaderAndCreateLineForNumericAggregations( + Aggregations aggregations, List header) throws CsvExtractorException { + List line = new ArrayList<>(); + List aggregationList = aggregations.asList(); + for (Aggregation aggregation : aggregationList) { + handleNumericMetricAggregation(header, line, aggregation); } - - private boolean allNumericAggregations(Aggregations aggregations) { - List aggregationList = aggregations.asList(); - for (Aggregation aggregation : aggregationList) { - if (!(aggregation instanceof NumericMetricsAggregation)) { - return false; - } + return line; + } + + private void handleNumericMetricAggregation( + List header, List line, Aggregation aggregation) + throws CsvExtractorException { + final String name = aggregation.getName(); + + if (aggregation instanceof NumericMetricsAggregation.SingleValue) { + if (!header.contains(name)) { + header.add(name); + } + NumericMetricsAggregation.SingleValue agg = + (NumericMetricsAggregation.SingleValue) aggregation; + line.add(!Double.isInfinite(agg.value()) ? 
agg.getValueAsString() : "null"); + } else if (aggregation instanceof NumericMetricsAggregation.MultiValue) { + // todo:Numeric MultiValue - Stats,ExtendedStats,Percentile... + if (aggregation instanceof Stats) { + String[] statsHeaders = new String[] {"count", "sum", "avg", "min", "max"}; + boolean isExtendedStats = aggregation instanceof ExtendedStats; + if (isExtendedStats) { + String[] extendedHeaders = new String[] {"sumOfSquares", "variance", "stdDeviation"}; + statsHeaders = Util.concatStringsArrays(statsHeaders, extendedHeaders); } - return true; - } + mergeHeadersWithPrefix(header, name, statsHeaders); + Stats stats = (Stats) aggregation; + line.add(String.valueOf(stats.getCount())); + line.add(stats.getSumAsString()); + line.add(stats.getAvgAsString()); + line.add(stats.getMinAsString()); + line.add(stats.getMaxAsString()); + if (isExtendedStats) { + ExtendedStats extendedStats = (ExtendedStats) aggregation; + line.add(extendedStats.getSumOfSquaresAsString()); + line.add(extendedStats.getVarianceAsString()); + line.add(extendedStats.getStdDeviationAsString()); + } + } else if (aggregation instanceof Percentiles) { + + final List percentileHeaders = new ArrayList<>(7); + final Percentiles percentiles = (Percentiles) aggregation; - private Aggregation skipAggregations(Aggregation firstAggregation) { - while (firstAggregation instanceof SingleBucketAggregation) { - firstAggregation = getFirstAggregation(((SingleBucketAggregation) firstAggregation).getAggregations()); + for (final Percentile p : percentiles) { + percentileHeaders.add(String.valueOf(p.getPercent())); + line.add(percentiles.percentileAsString(p.getPercent())); } - return firstAggregation; + mergeHeadersWithPrefix(header, name, percentileHeaders.toArray(new String[0])); + } else { + throw new CsvExtractorException( + "unknown NumericMetricsAggregation.MultiValue:" + aggregation.getClass()); + } + + } else { + throw new CsvExtractorException("unknown NumericMetricsAggregation" + 
aggregation.getClass()); } - - private Aggregation getFirstAggregation(Aggregations aggregations) { - return aggregations.asList().get(0); + } + + private void mergeHeadersWithPrefix(List header, String prefix, String[] newHeaders) { + for (int i = 0; i < newHeaders.length; i++) { + String newHeader = newHeaders[i]; + if (prefix != null && !prefix.equals("")) { + newHeader = prefix + "." + newHeader; + } + if (!header.contains(newHeader)) { + header.add(newHeader); + } } + } + + private boolean allNumericAggregations(Aggregations aggregations) { + List aggregationList = aggregations.asList(); + for (Aggregation aggregation : aggregationList) { + if (!(aggregation instanceof NumericMetricsAggregation)) { + return false; + } + } + return true; + } - private List> createCSVLinesFromDocs(boolean flat, String separator, - List> docsAsMap, - List headers) { - List> csvLines = new ArrayList<>(); - for (Map doc : docsAsMap) { - List line = new ArrayList<>(); - for (String header : headers) { - line.add(findFieldValue(header, doc, flat, separator)); - } - csvLines.add(line); - } - return csvLines; + private Aggregation skipAggregations(Aggregation firstAggregation) { + while (firstAggregation instanceof SingleBucketAggregation) { + firstAggregation = + getFirstAggregation(((SingleBucketAggregation) firstAggregation).getAggregations()); + } + return firstAggregation; + } + + private Aggregation getFirstAggregation(Aggregations aggregations) { + return aggregations.asList().get(0); + } + + private List> createCSVLinesFromDocs( + boolean flat, String separator, List> docsAsMap, List headers) { + List> csvLines = new ArrayList<>(); + for (Map doc : docsAsMap) { + List line = new ArrayList<>(); + for (String header : headers) { + line.add(findFieldValue(header, doc, flat, separator)); + } + csvLines.add(line); + } + return csvLines; + } + + private List createHeadersAndFillDocsMap( + final boolean flat, + final SearchHit[] hits, + final List> docsAsMap, + final List fieldNames) 
{ + final Set csvHeaders = new LinkedHashSet<>(); + if (fieldNames != null) { + csvHeaders.addAll(fieldNames); } - private List createHeadersAndFillDocsMap(final boolean flat, final SearchHit[] hits, - final List> docsAsMap, - final List fieldNames) { - final Set csvHeaders = new LinkedHashSet<>(); - if (fieldNames != null) { - csvHeaders.addAll(fieldNames); - } + for (final SearchHit hit : hits) { + final Map doc = hit.getSourceAsMap(); + final Map fields = hit.getFields(); + for (final DocumentField searchHitField : fields.values()) { + doc.put(searchHitField.getName(), searchHitField.getValue()); + } + + if (this.includeId) { + doc.put("_id", hit.getId()); + } + if (this.includeScore) { + doc.put("_score", hit.getScore()); + } + + // select function as field is a special case where each hit has non-null field (function) + // and sourceAsMap is all columns in index (the same as 'SELECT *') + if (fields.isEmpty()) { + mergeHeaders(csvHeaders, doc, flat); + } + docsAsMap.add(doc); + } - for (final SearchHit hit : hits) { - final Map doc = hit.getSourceAsMap(); - final Map fields = hit.getFields(); - for (final DocumentField searchHitField : fields.values()) { - doc.put(searchHitField.getName(), searchHitField.getValue()); - } - - if (this.includeId) { - doc.put("_id", hit.getId()); - } - if (this.includeScore) { - doc.put("_score", hit.getScore()); - } - - // select function as field is a special case where each hit has non-null field (function) - // and sourceAsMap is all columns in index (the same as 'SELECT *') - if (fields.isEmpty()) { - mergeHeaders(csvHeaders, doc, flat); - } - docsAsMap.add(doc); - } + return new ArrayList<>(csvHeaders); + } - return new ArrayList<>(csvHeaders); - } + private String findFieldValue( + String header, Map doc, boolean flat, String separator) { + if (flat && header.contains(".")) { + String[] split = header.split("\\."); + Object innerDoc = doc; - private String findFieldValue(String header, Map doc, boolean flat, String 
separator) { - if (flat && header.contains(".")) { - String[] split = header.split("\\."); - Object innerDoc = doc; - - for (String innerField : split) { - if (!(innerDoc instanceof Map)) { - return ""; - } - innerDoc = ((Map) innerDoc).get(innerField); - if (innerDoc == null) { - return ""; - } - } - return innerDoc.toString(); - } else { - if (doc.containsKey(header)) { - return String.valueOf(doc.get(header)); - } + for (String innerField : split) { + if (!(innerDoc instanceof Map)) { + return ""; } - return ""; - } - - private void mergeHeaders(Set headers, Map doc, boolean flat) { - if (!flat) { - headers.addAll(doc.keySet()); - return; + innerDoc = ((Map) innerDoc).get(innerField); + if (innerDoc == null) { + return ""; } - mergeFieldNamesRecursive(headers, doc, ""); + } + return innerDoc.toString(); + } else { + if (doc.containsKey(header)) { + return String.valueOf(doc.get(header)); + } } + return ""; + } - private void mergeFieldNamesRecursive(Set headers, Map doc, String prefix) { - for (Map.Entry field : doc.entrySet()) { - Object value = field.getValue(); - if (value instanceof Map) { - mergeFieldNamesRecursive(headers, (Map) value, prefix + field.getKey() + "."); - } else { - headers.add(prefix + field.getKey()); - } - } + private void mergeHeaders(Set headers, Map doc, boolean flat) { + if (!flat) { + headers.addAll(doc.keySet()); + return; + } + mergeFieldNamesRecursive(headers, doc, ""); + } + + private void mergeFieldNamesRecursive( + Set headers, Map doc, String prefix) { + for (Map.Entry field : doc.entrySet()) { + Object value = field.getValue(); + if (value instanceof Map) { + mergeFieldNamesRecursive( + headers, (Map) value, prefix + field.getKey() + "."); + } else { + headers.add(prefix + field.getKey()); + } } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java index 7e0f8e8ff9..cb289e4625 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java @@ -3,14 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; -/** - * Created by Eliran on 29/12/2015. - */ +/** Created by Eliran on 29/12/2015. */ public class CsvExtractorException extends Exception { - public CsvExtractorException(String message) { - super(message); - } + public CsvExtractorException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java index 7c8ed62a07..b4add64f9c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import org.opensearch.rest.RestRequest; @@ -11,16 +10,17 @@ public class CursorActionRequestRestExecutorFactory { - public static CursorAsyncRestExecutor createExecutor(RestRequest request, String cursorId, Format format) { + public static CursorAsyncRestExecutor createExecutor( + RestRequest request, String cursorId, Format format) { - if (isCursorCloseRequest(request)) { - return new CursorAsyncRestExecutor(new CursorCloseExecutor(cursorId)); - } else { - return new CursorAsyncRestExecutor(new CursorResultExecutor(cursorId, format)); - } + if (isCursorCloseRequest(request)) { + return new CursorAsyncRestExecutor(new CursorCloseExecutor(cursorId)); + } else { + return new CursorAsyncRestExecutor(new CursorResultExecutor(cursorId, format)); } + } - private static boolean isCursorCloseRequest(final RestRequest 
request) { - return request.path().endsWith("/_sql/close"); - } + private static boolean isCursorCloseRequest(final RestRequest request) { + return request.path().endsWith("/_sql/close"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java index 9b8e70c168..ffcf2adbf3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import java.io.IOException; @@ -25,84 +24,83 @@ import org.opensearch.threadpool.ThreadPool; public class CursorAsyncRestExecutor { - /** - * Custom thread pool name managed by OpenSearch - */ - public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; + /** Custom thread pool name managed by OpenSearch */ + public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; - private static final Logger LOG = LogManager.getLogger(CursorAsyncRestExecutor.class); + private static final Logger LOG = LogManager.getLogger(CursorAsyncRestExecutor.class); - /** - * Delegated rest executor to async - */ - private final CursorRestExecutor executor; + /** Delegated rest executor to async */ + private final CursorRestExecutor executor; + CursorAsyncRestExecutor(CursorRestExecutor executor) { + this.executor = executor; + } - CursorAsyncRestExecutor(CursorRestExecutor executor) { - this.executor = executor; - } + public void execute(Client client, Map params, RestChannel channel) { + async(client, params, channel); + } - public void execute(Client client, Map params, RestChannel channel) { - async(client, params, channel); - } + /** Run given task in thread pool asynchronously */ + private void async(Client client, Map params, RestChannel 
channel) { - /** - * Run given task in thread pool asynchronously - */ - private void async(Client client, Map params, RestChannel channel) { - - ThreadPool threadPool = client.threadPool(); - Runnable runnable = () -> { - try { - doExecuteWithTimeMeasured(client, params, channel); - } catch (IOException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an IO/SQL exception: {}", QueryContext.getRequestId(), - e.getMessage()); - e.printStackTrace(); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); - } catch (IllegalStateException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got a runtime exception: {}", QueryContext.getRequestId(), - e.getMessage()); - e.printStackTrace(); - channel.sendResponse(new BytesRestResponse(RestStatus.INSUFFICIENT_STORAGE, - "Memory circuit is broken.")); - } catch (Throwable t) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an unknown throwable: {}", QueryContext.getRequestId(), - t.getMessage()); - t.printStackTrace(); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, - String.valueOf(t.getMessage()))); - } finally { - BackOffRetryStrategy.releaseMem(executor); - } + ThreadPool threadPool = client.threadPool(); + Runnable runnable = + () -> { + try { + doExecuteWithTimeMeasured(client, params, channel); + } catch (IOException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an IO/SQL exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + e.printStackTrace(); + channel.sendResponse( + new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } catch (IllegalStateException e) { + 
Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got a runtime exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + e.printStackTrace(); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INSUFFICIENT_STORAGE, "Memory circuit is broken.")); + } catch (Throwable t) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an unknown throwable: {}", + QueryContext.getRequestId(), + t.getMessage()); + t.printStackTrace(); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INTERNAL_SERVER_ERROR, String.valueOf(t.getMessage()))); + } finally { + BackOffRetryStrategy.releaseMem(executor); + } }; - // Preserve context of calling thread to ensure headers of requests are forwarded when running blocking actions - threadPool.schedule( - QueryContext.withCurrentContext(runnable), - new TimeValue(0L), - SQL_WORKER_THREAD_POOL_NAME - ); - } + // Preserve context of calling thread to ensure headers of requests are forwarded when running + // blocking actions + threadPool.schedule( + QueryContext.withCurrentContext(runnable), new TimeValue(0L), SQL_WORKER_THREAD_POOL_NAME); + } - /** - * Time the real execution of Executor and log slow query for troubleshooting - */ - private void doExecuteWithTimeMeasured(Client client, - Map params, - RestChannel channel) throws Exception { - long startTime = System.nanoTime(); - try { - executor.execute(client, params, channel); - } finally { - Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); - int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); - if (elapsed.getSeconds() >= slowLogThreshold) { - LOG.warn("[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); - } - } + /** Time the real execution of Executor and log slow query for troubleshooting */ + private void 
doExecuteWithTimeMeasured( + Client client, Map params, RestChannel channel) throws Exception { + long startTime = System.nanoTime(); + try { + executor.execute(client, params, channel); + } finally { + Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); + int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); + if (elapsed.getSeconds() >= slowLogThreshold) { + LOG.warn( + "[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); + } } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java index 98e89c12e4..7282eaed4c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import static org.opensearch.core.rest.RestStatus.OK; @@ -25,66 +24,69 @@ public class CursorCloseExecutor implements CursorRestExecutor { - private static final Logger LOG = LogManager.getLogger(CursorCloseExecutor.class); - - private static final String SUCCEEDED_TRUE = "{\"succeeded\":true}"; - private static final String SUCCEEDED_FALSE = "{\"succeeded\":false}"; - - private String cursorId; - - public CursorCloseExecutor(String cursorId) { - this.cursorId = cursorId; - } - - public void execute(Client client, Map params, RestChannel channel) throws Exception { - try { - String formattedResponse = execute(client, params); - channel.sendResponse(new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); - } catch (IllegalArgumentException | JSONException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - LOG.error("Error parsing the cursor", e); - 
channel.sendResponse(new BytesRestResponse(channel, e)); - } catch (OpenSearchException e) { - int status = (e.status().getStatus()); - if (status > 399 && status < 500) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - } else if (status > 499) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - } - LOG.error("Error completing cursor request", e); - channel.sendResponse(new BytesRestResponse(channel, e)); - } + private static final Logger LOG = LogManager.getLogger(CursorCloseExecutor.class); + + private static final String SUCCEEDED_TRUE = "{\"succeeded\":true}"; + private static final String SUCCEEDED_FALSE = "{\"succeeded\":false}"; + + private String cursorId; + + public CursorCloseExecutor(String cursorId) { + this.cursorId = cursorId; + } + + public void execute(Client client, Map params, RestChannel channel) + throws Exception { + try { + String formattedResponse = execute(client, params); + channel.sendResponse( + new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); + } catch (IllegalArgumentException | JSONException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + LOG.error("Error parsing the cursor", e); + channel.sendResponse(new BytesRestResponse(channel, e)); + } catch (OpenSearchException e) { + int status = (e.status().getStatus()); + if (status > 399 && status < 500) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + } else if (status > 499) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + } + LOG.error("Error completing cursor request", e); + channel.sendResponse(new BytesRestResponse(channel, e)); } + } - public String execute(Client client, Map params) throws Exception { - String[] splittedCursor = cursorId.split(":"); - - if (splittedCursor.length!=2) { - throw new VerificationException("Not able to 
parse invalid cursor"); - } - - String type = splittedCursor[0]; - CursorType cursorType = CursorType.getById(type); - - switch(cursorType) { - case DEFAULT: - DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); - return handleDefaultCursorCloseRequest(client, defaultCursor); - case AGGREGATION: - case JOIN: - default: throw new VerificationException("Unsupported cursor type [" + type + "]"); - } + public String execute(Client client, Map params) throws Exception { + String[] splittedCursor = cursorId.split(":"); + if (splittedCursor.length != 2) { + throw new VerificationException("Not able to parse invalid cursor"); } - private String handleDefaultCursorCloseRequest(Client client, DefaultCursor cursor) { - String scrollId = cursor.getScrollId(); - ClearScrollResponse clearScrollResponse = client.prepareClearScroll().addScrollId(scrollId).get(); - if (clearScrollResponse.isSucceeded()) { - return SUCCEEDED_TRUE; - } else { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - return SUCCEEDED_FALSE; - } + String type = splittedCursor[0]; + CursorType cursorType = CursorType.getById(type); + + switch (cursorType) { + case DEFAULT: + DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); + return handleDefaultCursorCloseRequest(client, defaultCursor); + case AGGREGATION: + case JOIN: + default: + throw new VerificationException("Unsupported cursor type [" + type + "]"); + } + } + + private String handleDefaultCursorCloseRequest(Client client, DefaultCursor cursor) { + String scrollId = cursor.getScrollId(); + ClearScrollResponse clearScrollResponse = + client.prepareClearScroll().addScrollId(scrollId).get(); + if (clearScrollResponse.isSucceeded()) { + return SUCCEEDED_TRUE; + } else { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + return SUCCEEDED_FALSE; } + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java index 5f294f8e32..4c4b854379 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java @@ -3,21 +3,16 @@ * SPDX-License-Identifier: Apache-2.0 */ - - package org.opensearch.sql.legacy.executor.cursor; import java.util.Map; import org.opensearch.client.Client; import org.opensearch.rest.RestChannel; -/** - * Interface to execute cursor request. - */ +/** Interface to execute cursor request. */ public interface CursorRestExecutor { - void execute(Client client, Map params, RestChannel channel) - throws Exception; + void execute(Client client, Map params, RestChannel channel) throws Exception; - String execute(Client client, Map params) throws Exception; + String execute(Client client, Map params) throws Exception; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java index 9753f8049c..620b8e7b86 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import static org.opensearch.core.rest.RestStatus.OK; @@ -34,99 +33,105 @@ public class CursorResultExecutor implements CursorRestExecutor { - private String cursorId; - private Format format; - - private static final Logger LOG = LogManager.getLogger(CursorResultExecutor.class); - - public CursorResultExecutor(String cursorId, Format format) { - this.cursorId = cursorId; - this.format = format; + private String cursorId; + 
private Format format; + + private static final Logger LOG = LogManager.getLogger(CursorResultExecutor.class); + + public CursorResultExecutor(String cursorId, Format format) { + this.cursorId = cursorId; + this.format = format; + } + + public void execute(Client client, Map params, RestChannel channel) + throws Exception { + try { + String formattedResponse = execute(client, params); + channel.sendResponse( + new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); + } catch (IllegalArgumentException | JSONException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + LOG.error("Error parsing the cursor", e); + channel.sendResponse(new BytesRestResponse(channel, e)); + } catch (OpenSearchException e) { + int status = (e.status().getStatus()); + if (status > 399 && status < 500) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + } else if (status > 499) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + } + LOG.error("Error completing cursor request", e); + channel.sendResponse(new BytesRestResponse(channel, e)); } + } - public void execute(Client client, Map params, RestChannel channel) throws Exception { - try { - String formattedResponse = execute(client, params); - channel.sendResponse(new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); - } catch (IllegalArgumentException | JSONException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - LOG.error("Error parsing the cursor", e); - channel.sendResponse(new BytesRestResponse(channel, e)); - } catch (OpenSearchException e) { - int status = (e.status().getStatus()); - if (status > 399 && status < 500) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - } else if (status > 499) { - 
Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - } - LOG.error("Error completing cursor request", e); - channel.sendResponse(new BytesRestResponse(channel, e)); - } - } - - public String execute(Client client, Map params) throws Exception { - /** - * All cursor's are of the form : - * The serialized form before encoding is upto Cursor implementation - */ - String[] splittedCursor = cursorId.split(":", 2); + public String execute(Client client, Map params) throws Exception { + /** + * All cursor's are of the form : The serialized form before + * encoding is upto Cursor implementation + */ + String[] splittedCursor = cursorId.split(":", 2); - if (splittedCursor.length!=2) { - throw new VerificationException("Not able to parse invalid cursor"); - } - - String type = splittedCursor[0]; - CursorType cursorType = CursorType.getById(type); + if (splittedCursor.length != 2) { + throw new VerificationException("Not able to parse invalid cursor"); + } - switch(cursorType) { - case DEFAULT: - DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); - return handleDefaultCursorRequest(client, defaultCursor); - case AGGREGATION: - case JOIN: - default: throw new VerificationException("Unsupported cursor type [" + type + "]"); - } + String type = splittedCursor[0]; + CursorType cursorType = CursorType.getById(type); + + switch (cursorType) { + case DEFAULT: + DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); + return handleDefaultCursorRequest(client, defaultCursor); + case AGGREGATION: + case JOIN: + default: + throw new VerificationException("Unsupported cursor type [" + type + "]"); } + } - private String handleDefaultCursorRequest(Client client, DefaultCursor cursor) { - String previousScrollId = cursor.getScrollId(); - LocalClusterState clusterState = LocalClusterState.state(); - TimeValue scrollTimeout = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); - SearchResponse scrollResponse = 
client.prepareSearchScroll(previousScrollId).setScroll(scrollTimeout).get(); - SearchHits searchHits = scrollResponse.getHits(); - SearchHit[] searchHitArray = searchHits.getHits(); - String newScrollId = scrollResponse.getScrollId(); + private String handleDefaultCursorRequest(Client client, DefaultCursor cursor) { + String previousScrollId = cursor.getScrollId(); + LocalClusterState clusterState = LocalClusterState.state(); + TimeValue scrollTimeout = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + SearchResponse scrollResponse = + client.prepareSearchScroll(previousScrollId).setScroll(scrollTimeout).get(); + SearchHits searchHits = scrollResponse.getHits(); + SearchHit[] searchHitArray = searchHits.getHits(); + String newScrollId = scrollResponse.getScrollId(); - int rowsLeft = (int) cursor.getRowsLeft(); - int fetch = cursor.getFetchSize(); + int rowsLeft = (int) cursor.getRowsLeft(); + int fetch = cursor.getFetchSize(); if (rowsLeft < fetch && rowsLeft < searchHitArray.length) { /** * This condition implies we are on the last page, and we might need to truncate the result from SearchHit[] * Avoid truncating in following two scenarios - * 1. number of rows to be sent equals fetchSize - * 2. size of SearchHit[] is already less that rows that needs to be sent - * + *
    + *
  1. number of rows to be sent equals fetchSize + *
  2. size of SearchHit[] is already less that rows that needs to be sent + *
* Else truncate to desired number of rows */ SearchHit[] newSearchHits = Arrays.copyOf(searchHitArray, rowsLeft); searchHits = new SearchHits(newSearchHits, searchHits.getTotalHits(), searchHits.getMaxScore()); } - rowsLeft = rowsLeft - fetch; + rowsLeft = rowsLeft - fetch; - if (rowsLeft <=0) { - /** Clear the scroll context on last page */ - ClearScrollResponse clearScrollResponse = client.prepareClearScroll().addScrollId(newScrollId).get(); - if (!clearScrollResponse.isSucceeded()) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.info("Error closing the cursor context {} ", newScrollId); - } - } - - cursor.setRowsLeft(rowsLeft); - cursor.setScrollId(newScrollId); - Protocol protocol = new Protocol(client, searchHits, format.name().toLowerCase(), cursor); - return protocol.cursorFormat(); + if (rowsLeft <= 0) { + /** Clear the scroll context on last page */ + ClearScrollResponse clearScrollResponse = + client.prepareClearScroll().addScrollId(newScrollId).get(); + if (!clearScrollResponse.isSucceeded()) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.info("Error closing the cursor context {} ", newScrollId); + } } + + cursor.setRowsLeft(rowsLeft); + cursor.setScrollId(newScrollId); + Protocol protocol = new Protocol(client, searchHits, format.name().toLowerCase(), cursor); + return protocol.cursorFormat(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java index d9eb463572..872442f04f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static 
org.opensearch.sql.legacy.executor.format.DateFieldFormatter.FORMAT_JDBC; @@ -18,43 +17,44 @@ import org.opensearch.sql.legacy.expression.model.ExprValue; import org.opensearch.sql.legacy.query.planner.core.ColumnNode; -/** - * The definition of BindingTuple ResultSet. - */ +/** The definition of BindingTuple ResultSet. */ public class BindingTupleResultSet extends ResultSet { - public BindingTupleResultSet(List columnNodes, List bindingTuples) { - this.schema = buildSchema(columnNodes); - this.dataRows = buildDataRows(columnNodes, bindingTuples); - } - - @VisibleForTesting - public static Schema buildSchema(List columnNodes) { - List columnList = columnNodes.stream() - .map(node -> new Schema.Column( - node.getName(), - node.getAlias(), - node.getType())) - .collect(Collectors.toList()); - return new Schema(columnList); - } - - @VisibleForTesting - public static DataRows buildDataRows(List columnNodes, List bindingTuples) { - List rowList = bindingTuples.stream().map(tuple -> { - Map bindingMap = tuple.getBindingMap(); - Map rowMap = new HashMap<>(); - for (ColumnNode column : columnNodes) { - String columnName = column.columnName(); - Object value = bindingMap.get(columnName).value(); - if (column.getType() == Schema.Type.DATE) { - value = DateFormat.getFormattedDate(new Date((Long) value), FORMAT_JDBC); - } - rowMap.put(columnName, value); - } - return new DataRows.Row(rowMap); - }).collect(Collectors.toList()); - - return new DataRows(bindingTuples.size(), bindingTuples.size(), rowList); - } + public BindingTupleResultSet(List columnNodes, List bindingTuples) { + this.schema = buildSchema(columnNodes); + this.dataRows = buildDataRows(columnNodes, bindingTuples); + } + + @VisibleForTesting + public static Schema buildSchema(List columnNodes) { + List columnList = + columnNodes.stream() + .map(node -> new Schema.Column(node.getName(), node.getAlias(), node.getType())) + .collect(Collectors.toList()); + return new Schema(columnList); + } + + @VisibleForTesting + 
public static DataRows buildDataRows( + List columnNodes, List bindingTuples) { + List rowList = + bindingTuples.stream() + .map( + tuple -> { + Map bindingMap = tuple.getBindingMap(); + Map rowMap = new HashMap<>(); + for (ColumnNode column : columnNodes) { + String columnName = column.columnName(); + Object value = bindingMap.get(columnName).value(); + if (column.getType() == Schema.Type.DATE) { + value = DateFormat.getFormattedDate(new Date((Long) value), FORMAT_JDBC); + } + rowMap.put(columnName, value); + } + return new DataRows.Row(rowMap); + }) + .collect(Collectors.toList()); + + return new DataRows(bindingTuples.size(), bindingTuples.size(), rowList); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java index 541d3200a5..fc153afae8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.Iterator; @@ -12,76 +11,76 @@ public class DataRows implements Iterable { - private long size; - private long totalHits; - private List rows; - - public DataRows(long size, long totalHits, List rows) { - this.size = size; - this.totalHits = totalHits; - this.rows = rows; + private long size; + private long totalHits; + private List rows; + + public DataRows(long size, long totalHits, List rows) { + this.size = size; + this.totalHits = totalHits; + this.rows = rows; + } + + public DataRows(List rows) { + this.size = rows.size(); + this.totalHits = rows.size(); + this.rows = rows; + } + + public long getSize() { + return size; + } + + public long getTotalHits() { + return totalHits; + } + + // Iterator method for DataRows + @Override + public Iterator iterator() { + return new Iterator() { + private final Iterator 
iter = rows.iterator(); + + @Override + public boolean hasNext() { + return iter.hasNext(); + } + + @Override + public Row next() { + return iter.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException("No changes allowed to DataRows rows"); + } + }; + } + + // Inner class for Row object + public static class Row { + + private Map data; + + public Row(Map data) { + this.data = data; } - public DataRows(List rows) { - this.size = rows.size(); - this.totalHits = rows.size(); - this.rows = rows; + public Map getContents() { + return data; } - public long getSize() { - return size; + public boolean hasField(String field) { + return data.containsKey(field); } - public long getTotalHits() { - return totalHits; + public Object getData(String field) { + return data.get(field); } - // Iterator method for DataRows - @Override - public Iterator iterator() { - return new Iterator() { - private final Iterator iter = rows.iterator(); - - @Override - public boolean hasNext() { - return iter.hasNext(); - } - - @Override - public Row next() { - return iter.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("No changes allowed to DataRows rows"); - } - }; - } - - // Inner class for Row object - public static class Row { - - private Map data; - - public Row(Map data) { - this.data = data; - } - - public Map getContents() { - return data; - } - - public boolean hasField(String field) { - return data.containsKey(field); - } - - public Object getData(String field) { - return data.get(field); - } - - public Object getDataOrDefault(String field, Object defaultValue) { - return data.getOrDefault(field, defaultValue); - } + public Object getDataOrDefault(String field, Object defaultValue) { + return data.getOrDefault(field, defaultValue); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java index aa803975df..dc239abd84 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import com.google.common.annotations.VisibleForTesting; @@ -23,163 +22,169 @@ import org.opensearch.sql.legacy.esdomain.LocalClusterState; import org.opensearch.sql.legacy.esdomain.mapping.FieldMappings; -/** - * Formatter to transform date fields into a consistent format for consumption by clients. - */ +/** Formatter to transform date fields into a consistent format for consumption by clients. */ public class DateFieldFormatter { - private static final Logger LOG = LogManager.getLogger(DateFieldFormatter.class); - public static final String FORMAT_JDBC = "yyyy-MM-dd HH:mm:ss.SSS"; - private static final String FORMAT_DELIMITER = "\\|\\|"; - - private static final String FORMAT_DOT_DATE_AND_TIME = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; - private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION = "yyyy-MM-dd'T'HH:mm:ss"; - private static final String - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME = "yyyy-MM-dd'T'"; - private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION = "yyyy-MM-dd'T'HH:mm:ssXXX"; - private static final String FORMAT_DOT_DATE = DateFormat.getFormatString("date"); - - private final Map> dateFieldFormatMap; - private final Map fieldAliasMap; - private Set dateColumns; - - public DateFieldFormatter(String indexName, List columns, Map fieldAliasMap) { - this.dateFieldFormatMap = getDateFieldFormatMap(indexName); - 
this.dateColumns = getDateColumns(columns); - this.fieldAliasMap = fieldAliasMap; + private static final Logger LOG = LogManager.getLogger(DateFieldFormatter.class); + public static final String FORMAT_JDBC = "yyyy-MM-dd HH:mm:ss.SSS"; + private static final String FORMAT_DELIMITER = "\\|\\|"; + + private static final String FORMAT_DOT_DATE_AND_TIME = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; + private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION = + "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; + private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION = + "yyyy-MM-dd'T'HH:mm:ss"; + private static final String + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME = "yyyy-MM-dd'T'"; + private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION = + "yyyy-MM-dd'T'HH:mm:ssXXX"; + private static final String FORMAT_DOT_DATE = DateFormat.getFormatString("date"); + + private final Map> dateFieldFormatMap; + private final Map fieldAliasMap; + private Set dateColumns; + + public DateFieldFormatter( + String indexName, List columns, Map fieldAliasMap) { + this.dateFieldFormatMap = getDateFieldFormatMap(indexName); + this.dateColumns = getDateColumns(columns); + this.fieldAliasMap = fieldAliasMap; + } + + @VisibleForTesting + protected DateFieldFormatter( + Map> dateFieldFormatMap, + List columns, + Map fieldAliasMap) { + this.dateFieldFormatMap = dateFieldFormatMap; + this.dateColumns = getDateColumns(columns); + this.fieldAliasMap = fieldAliasMap; + } + + /** + * Apply the JDBC date format ({@code yyyy-MM-dd HH:mm:ss.SSS}) to date values in the current row. + * + * @param rowSource The row in which to format the date values. 
+ */ + public void applyJDBCDateFormat(Map rowSource) { + for (String columnName : dateColumns) { + Object columnOriginalDate = rowSource.get(columnName); + if (columnOriginalDate == null) { + // Don't try to parse null date values + continue; + } + + List formats = getFormatsForColumn(columnName); + if (formats == null) { + LOG.warn( + "Could not determine date formats for column {}; returning original value", columnName); + continue; + } + + Date date = parseDateString(formats, columnOriginalDate.toString()); + if (date != null) { + rowSource.put(columnName, DateFormat.getFormattedDate(date, FORMAT_JDBC)); + break; + } else { + LOG.warn("Could not parse date value; returning original value"); + } } - - @VisibleForTesting - protected DateFieldFormatter(Map> dateFieldFormatMap, - List columns, - Map fieldAliasMap) { - this.dateFieldFormatMap = dateFieldFormatMap; - this.dateColumns = getDateColumns(columns); - this.fieldAliasMap = fieldAliasMap; + } + + private List getFormatsForColumn(String columnName) { + // Handle special cases for column names + if (fieldAliasMap.get(columnName) != null) { + // Column was aliased, and we need to find the base name for the column + columnName = fieldAliasMap.get(columnName); + } else if (columnName.split("\\.").length == 2) { + // Column is part of a join, and is qualified by the table alias + columnName = columnName.split("\\.")[1]; } - - /** - * Apply the JDBC date format ({@code yyyy-MM-dd HH:mm:ss.SSS}) to date values in the current row. - * - * @param rowSource The row in which to format the date values. 
- */ - public void applyJDBCDateFormat(Map rowSource) { - for (String columnName : dateColumns) { - Object columnOriginalDate = rowSource.get(columnName); - if (columnOriginalDate == null) { - // Don't try to parse null date values - continue; - } - - List formats = getFormatsForColumn(columnName); - if (formats == null) { - LOG.warn("Could not determine date formats for column {}; returning original value", columnName); - continue; - } - - Date date = parseDateString(formats, columnOriginalDate.toString()); - if (date != null) { - rowSource.put(columnName, DateFormat.getFormattedDate(date, FORMAT_JDBC)); - break; - } else { - LOG.warn("Could not parse date value; returning original value"); - } + return dateFieldFormatMap.get(columnName); + } + + private Set getDateColumns(List columns) { + return columns.stream() + .filter(column -> column.getType().equals(Schema.Type.DATE.nameLowerCase())) + .map(Schema.Column::getName) + .collect(Collectors.toSet()); + } + + private Map> getDateFieldFormatMap(String indexName) { + LocalClusterState state = LocalClusterState.state(); + Map> formatMap = new HashMap<>(); + + String[] indices = indexName.split("\\|"); + Collection typeProperties = state.getFieldMappings(indices).allMappings(); + + for (FieldMappings fieldMappings : typeProperties) { + for (Map.Entry> field : fieldMappings.data().entrySet()) { + String fieldName = field.getKey(); + Map properties = field.getValue(); + + if (properties.containsKey("format")) { + formatMap.put(fieldName, getFormatsFromProperties(properties.get("format").toString())); + } else { + // Give all field types a format, since operations such as casts + // can change the output type for a field to `date`. 
+ formatMap.put(fieldName, getFormatsFromProperties("date_optional_time")); } + } } - private List getFormatsForColumn(String columnName) { - // Handle special cases for column names - if (fieldAliasMap.get(columnName) != null) { - // Column was aliased, and we need to find the base name for the column - columnName = fieldAliasMap.get(columnName); - } else if (columnName.split("\\.").length == 2) { - // Column is part of a join, and is qualified by the table alias - columnName = columnName.split("\\.")[1]; - } - return dateFieldFormatMap.get(columnName); - } - - private Set getDateColumns(List columns) { - return columns.stream() - .filter(column -> column.getType().equals(Schema.Type.DATE.nameLowerCase())) - .map(Schema.Column::getName) - .collect(Collectors.toSet()); - } - - private Map> getDateFieldFormatMap(String indexName) { - LocalClusterState state = LocalClusterState.state(); - Map> formatMap = new HashMap<>(); - - String[] indices = indexName.split("\\|"); - Collection typeProperties = state.getFieldMappings(indices) - .allMappings(); - - for (FieldMappings fieldMappings: typeProperties) { - for (Map.Entry> field : fieldMappings.data().entrySet()) { - String fieldName = field.getKey(); - Map properties = field.getValue(); - - if (properties.containsKey("format")) { - formatMap.put(fieldName, getFormatsFromProperties(properties.get("format").toString())); - } else { - // Give all field types a format, since operations such as casts - // can change the output type for a field to `date`. 
- formatMap.put(fieldName, getFormatsFromProperties("date_optional_time")); - } + return formatMap; + } + + private List getFormatsFromProperties(String formatProperty) { + String[] formats = formatProperty.split(FORMAT_DELIMITER); + return Arrays.asList(formats); + } + + private Date parseDateString(List formats, String columnOriginalDate) { + TimeZone originalDefaultTimeZone = TimeZone.getDefault(); + Date parsedDate = null; + + // Apache Commons DateUtils uses the default TimeZone for the JVM when parsing. + // However, since all dates on OpenSearch are stored as UTC, we need to + // parse these values using the UTC timezone. + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + for (String columnFormat : formats) { + try { + switch (columnFormat) { + case "date_optional_time": + case "strict_date_optional_time": + parsedDate = + DateUtils.parseDate( + columnOriginalDate, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION, + FORMAT_DOT_DATE_AND_TIME, + FORMAT_DOT_DATE); + break; + case "epoch_millis": + parsedDate = new Date(Long.parseLong(columnOriginalDate)); + break; + case "epoch_second": + parsedDate = new Date(Long.parseLong(columnOriginalDate) * 1000); + break; + default: + String formatString = DateFormat.getFormatString(columnFormat); + if (formatString == null) { + // Custom format; take as-is + formatString = columnFormat; } + parsedDate = DateUtils.parseDate(columnOriginalDate, formatString); } - - return formatMap; - } - - private List getFormatsFromProperties(String formatProperty) { - String[] formats = formatProperty.split(FORMAT_DELIMITER); - return Arrays.asList(formats); + } catch (ParseException | NumberFormatException e) { + LOG.warn( + String.format( + "Could not parse date string %s as %s", columnOriginalDate, 
columnFormat)); + } } + // Reset default timezone after parsing + TimeZone.setDefault(originalDefaultTimeZone); - private Date parseDateString(List formats, String columnOriginalDate) { - TimeZone originalDefaultTimeZone = TimeZone.getDefault(); - Date parsedDate = null; - - // Apache Commons DateUtils uses the default TimeZone for the JVM when parsing. - // However, since all dates on OpenSearch are stored as UTC, we need to - // parse these values using the UTC timezone. - TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - for (String columnFormat : formats) { - try { - switch (columnFormat) { - case "date_optional_time": - case "strict_date_optional_time": - parsedDate = DateUtils.parseDate( - columnOriginalDate, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION, - FORMAT_DOT_DATE_AND_TIME, - FORMAT_DOT_DATE); - break; - case "epoch_millis": - parsedDate = new Date(Long.parseLong(columnOriginalDate)); - break; - case "epoch_second": - parsedDate = new Date(Long.parseLong(columnOriginalDate) * 1000); - break; - default: - String formatString = DateFormat.getFormatString(columnFormat); - if (formatString == null) { - // Custom format; take as-is - formatString = columnFormat; - } - parsedDate = DateUtils.parseDate(columnOriginalDate, formatString); - } - } catch (ParseException | NumberFormatException e) { - LOG.warn(String.format("Could not parse date string %s as %s", columnOriginalDate, columnFormat)); - } - } - // Reset default timezone after parsing - TimeZone.setDefault(originalDefaultTimeZone); - - return parsedDate; - } + return parsedDate; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java index 
40151c9413..fc9237918c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.time.Instant; @@ -15,112 +14,121 @@ public class DateFormat { - private static Map formatMap = new HashMap<>(); - - static { - // Special cases that are parsed separately - formatMap.put("date_optional_time", ""); - formatMap.put("strict_date_optional_time", ""); - formatMap.put("epoch_millis", ""); - formatMap.put("epoch_second", ""); - - formatMap.put("basic_date", Date.BASIC_DATE); - formatMap.put("basic_date_time", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_date_time_no_millis", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_ordinal_date", Date.BASIC_ORDINAL_DATE); - formatMap.put("basic_ordinal_date_time", - Date.BASIC_ORDINAL_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_ordinal_date_time_no_millis", Date.BASIC_ORDINAL_DATE+ Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_time", Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_time_no_millis", Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_t_time", Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_t_time_no_millis", Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_week_date", Date.BASIC_WEEK_DATE); - formatMap.put("basic_week_date_time", Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_week_date_time_no_millis", Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("date", Date.DATE); - formatMap.put("date_hour", Date.DATE + Time.T + Time.HOUR); - formatMap.put("date_hour_minute", Date.DATE + Time.T + Time.HOUR_MINUTE); - 
formatMap.put("date_hour_minute_second", Date.DATE + Time.T + Time.TIME); - formatMap.put("date_hour_minute_second_fraction", Date.DATE + Time.T + Time.TIME + Time.MILLIS); - formatMap.put("date_hour_minute_second_millis", Date.DATE + Time.T + Time.TIME + Time.MILLIS); - formatMap.put("date_time", Date.DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("date_time_no_millis", Date.DATE + Time.T + Time.TIME + Time.TZZ); - - formatMap.put("hour", Time.HOUR); - formatMap.put("hour_minute", Time.HOUR_MINUTE); - formatMap.put("hour_minute_second", Time.TIME); - formatMap.put("hour_minute_second_fraction", Time.TIME + Time.MILLIS); - formatMap.put("hour_minute_second_millis", Time.TIME + Time.MILLIS); - - formatMap.put("ordinal_date", Date.ORDINAL_DATE); - formatMap.put("ordinal_date_time", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("ordinal_date_time_no_millis", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.TZZ); - - formatMap.put("time", Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("time_no_millis", Time.TIME + Time.TZZ); - - formatMap.put("t_time", Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("t_time_no_millis", Time.T + Time.TIME + Time.TZZ); - - formatMap.put("week_date", Date.WEEK_DATE); - formatMap.put("week_date_time", Date.WEEK_DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("week_date_time_no_millis", Date.WEEK_DATE + Time.T + Time.TIME + Time.TZZ); - - // Note: input mapping is "weekyear", but output value is "week_year" - formatMap.put("week_year", Date.WEEKYEAR); - formatMap.put("weekyear_week", Date.WEEKYEAR_WEEK); - formatMap.put("weekyear_week_day", Date.WEEK_DATE); - - formatMap.put("year", Date.YEAR); - formatMap.put("year_month", Date.YEAR_MONTH); - formatMap.put("year_month_day", Date.DATE); - } - - private DateFormat() { - } - - public static String getFormatString(String formatName) { - return formatMap.get(formatName); - } - - public static String 
getFormattedDate(java.util.Date date, String dateFormat) { - Instant instant = date.toInstant(); - ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.of("Etc/UTC")); - return zdt.format(DateTimeFormatter.ofPattern(dateFormat)); - } - - private static class Date { - static String BASIC_DATE = "yyyyMMdd"; - static String BASIC_ORDINAL_DATE = "yyyyDDD"; - static String BASIC_WEEK_DATE = "YYYY'W'wwu"; - - static String DATE = "yyyy-MM-dd"; - static String ORDINAL_DATE = "yyyy-DDD"; - - static String YEAR = "yyyy"; - static String YEAR_MONTH = "yyyy-MM"; - - static String WEEK_DATE = "YYYY-'W'ww-u"; - static String WEEKYEAR = "YYYY"; - static String WEEKYEAR_WEEK = "YYYY-'W'ww"; - } - - private static class Time { - static String T = "'T'"; - static String BASIC_TIME = "HHmmss"; - static String TIME = "HH:mm:ss"; - - static String HOUR = "HH"; - static String HOUR_MINUTE = "HH:mm"; - - static String MILLIS = ".SSS"; - static String TZ = "Z"; - static String TZZ = "XX"; - } + private static Map formatMap = new HashMap<>(); + + static { + // Special cases that are parsed separately + formatMap.put("date_optional_time", ""); + formatMap.put("strict_date_optional_time", ""); + formatMap.put("epoch_millis", ""); + formatMap.put("epoch_second", ""); + + formatMap.put("basic_date", Date.BASIC_DATE); + formatMap.put( + "basic_date_time", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put( + "basic_date_time_no_millis", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("basic_ordinal_date", Date.BASIC_ORDINAL_DATE); + formatMap.put( + "basic_ordinal_date_time", + Date.BASIC_ORDINAL_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put( + "basic_ordinal_date_time_no_millis", + Date.BASIC_ORDINAL_DATE + Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("basic_time", Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put("basic_time_no_millis", Time.BASIC_TIME + Time.TZ); + + 
formatMap.put("basic_t_time", Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put("basic_t_time_no_millis", Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("basic_week_date", Date.BASIC_WEEK_DATE); + formatMap.put( + "basic_week_date_time", + Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put( + "basic_week_date_time_no_millis", + Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("date", Date.DATE); + formatMap.put("date_hour", Date.DATE + Time.T + Time.HOUR); + formatMap.put("date_hour_minute", Date.DATE + Time.T + Time.HOUR_MINUTE); + formatMap.put("date_hour_minute_second", Date.DATE + Time.T + Time.TIME); + formatMap.put("date_hour_minute_second_fraction", Date.DATE + Time.T + Time.TIME + Time.MILLIS); + formatMap.put("date_hour_minute_second_millis", Date.DATE + Time.T + Time.TIME + Time.MILLIS); + formatMap.put("date_time", Date.DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("date_time_no_millis", Date.DATE + Time.T + Time.TIME + Time.TZZ); + + formatMap.put("hour", Time.HOUR); + formatMap.put("hour_minute", Time.HOUR_MINUTE); + formatMap.put("hour_minute_second", Time.TIME); + formatMap.put("hour_minute_second_fraction", Time.TIME + Time.MILLIS); + formatMap.put("hour_minute_second_millis", Time.TIME + Time.MILLIS); + + formatMap.put("ordinal_date", Date.ORDINAL_DATE); + formatMap.put( + "ordinal_date_time", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("ordinal_date_time_no_millis", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.TZZ); + + formatMap.put("time", Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("time_no_millis", Time.TIME + Time.TZZ); + + formatMap.put("t_time", Time.T + Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("t_time_no_millis", Time.T + Time.TIME + Time.TZZ); + + formatMap.put("week_date", Date.WEEK_DATE); + formatMap.put("week_date_time", Date.WEEK_DATE + Time.T + Time.TIME + 
Time.MILLIS + Time.TZZ); + formatMap.put("week_date_time_no_millis", Date.WEEK_DATE + Time.T + Time.TIME + Time.TZZ); + + // Note: input mapping is "weekyear", but output value is "week_year" + formatMap.put("week_year", Date.WEEKYEAR); + formatMap.put("weekyear_week", Date.WEEKYEAR_WEEK); + formatMap.put("weekyear_week_day", Date.WEEK_DATE); + + formatMap.put("year", Date.YEAR); + formatMap.put("year_month", Date.YEAR_MONTH); + formatMap.put("year_month_day", Date.DATE); + } + + private DateFormat() {} + + public static String getFormatString(String formatName) { + return formatMap.get(formatName); + } + + public static String getFormattedDate(java.util.Date date, String dateFormat) { + Instant instant = date.toInstant(); + ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.of("Etc/UTC")); + return zdt.format(DateTimeFormatter.ofPattern(dateFormat)); + } + + private static class Date { + static String BASIC_DATE = "yyyyMMdd"; + static String BASIC_ORDINAL_DATE = "yyyyDDD"; + static String BASIC_WEEK_DATE = "YYYY'W'wwu"; + + static String DATE = "yyyy-MM-dd"; + static String ORDINAL_DATE = "yyyy-DDD"; + + static String YEAR = "yyyy"; + static String YEAR_MONTH = "yyyy-MM"; + + static String WEEK_DATE = "YYYY-'W'ww-u"; + static String WEEKYEAR = "YYYY"; + static String WEEKYEAR_WEEK = "YYYY-'W'ww"; + } + + private static class Time { + static String T = "'T'"; + static String BASIC_TIME = "HHmmss"; + static String TIME = "HH:mm:ss"; + + static String HOUR = "HH"; + static String HOUR_MINUTE = "HH:mm"; + + static String MILLIS = ".SSS"; + static String TZ = "Z"; + static String TZZ = "XX"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java index ccecacc432..24afb0a7af 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.Collections; @@ -14,28 +13,28 @@ import org.opensearch.sql.legacy.domain.Delete; public class DeleteResultSet extends ResultSet { - private Delete query; - private Object queryResult; - - public static final String DELETED = "deleted_rows"; - - public DeleteResultSet(Client client, Delete query, Object queryResult) { - this.client = client; - this.query = query; - this.queryResult = queryResult; - this.schema = new Schema(loadColumns()); - this.dataRows = new DataRows(loadRows()); - } - - private List loadColumns() { - return Collections.singletonList(new Schema.Column(DELETED, null, Schema.Type.LONG)); - } - - private List loadRows() { - return Collections.singletonList(new DataRows.Row(loadDeletedData())); - } - - private Map loadDeletedData(){ - return Collections.singletonMap(DELETED, ((BulkByScrollResponse) queryResult).getDeleted()); - } + private Delete query; + private Object queryResult; + + public static final String DELETED = "deleted_rows"; + + public DeleteResultSet(Client client, Delete query, Object queryResult) { + this.client = client; + this.query = query; + this.queryResult = queryResult; + this.schema = new Schema(loadColumns()); + this.dataRows = new DataRows(loadRows()); + } + + private List loadColumns() { + return Collections.singletonList(new Schema.Column(DELETED, null, Schema.Type.LONG)); + } + + private List loadRows() { + return Collections.singletonList(new DataRows.Row(loadDeletedData())); + } + + private Map loadDeletedData() { + return Collections.singletonMap(DELETED, ((BulkByScrollResponse) queryResult).getDeleted()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java index 
0cccf73268..eba6db2453 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.ArrayList; @@ -21,145 +20,142 @@ public class DescribeResultSet extends ResultSet { - private static final int DEFAULT_NUM_PREC_RADIX = 10; - private static final String IS_AUTOINCREMENT = "NO"; - - /** - * You are not required to set the field type to object explicitly, as this is the default value. - * https://www.elastic.co/guide/en/elasticsearch/reference/current/object.html - */ - public static final String DEFAULT_OBJECT_DATATYPE = "object"; - - private IndexStatement statement; - private Object queryResult; - - public DescribeResultSet(Client client, IndexStatement statement, Object queryResult) { - this.client = client; - this.clusterName = getClusterName(); - this.statement = statement; - this.queryResult = queryResult; - - this.schema = new Schema(statement, loadColumns()); - this.dataRows = new DataRows(loadRows()); + private static final int DEFAULT_NUM_PREC_RADIX = 10; + private static final String IS_AUTOINCREMENT = "NO"; + + /** + * You are not required to set the field type to object explicitly, as this is the default value. 
+ * https://www.elastic.co/guide/en/elasticsearch/reference/current/object.html + */ + public static final String DEFAULT_OBJECT_DATATYPE = "object"; + + private IndexStatement statement; + private Object queryResult; + + public DescribeResultSet(Client client, IndexStatement statement, Object queryResult) { + this.client = client; + this.clusterName = getClusterName(); + this.statement = statement; + this.queryResult = queryResult; + + this.schema = new Schema(statement, loadColumns()); + this.dataRows = new DataRows(loadRows()); + } + + private List loadColumns() { + List columns = new ArrayList<>(); + // Unused Columns are still included in Schema to match JDBC/ODBC standard + columns.add(new Column("TABLE_CAT", null, Type.KEYWORD)); + columns.add(new Column("TABLE_SCHEM", null, Type.KEYWORD)); + columns.add(new Column("TABLE_NAME", null, Type.KEYWORD)); + columns.add(new Column("COLUMN_NAME", null, Type.KEYWORD)); + columns.add(new Column("DATA_TYPE", null, Type.INTEGER)); + columns.add(new Column("TYPE_NAME", null, Type.KEYWORD)); + columns.add(new Column("COLUMN_SIZE", null, Type.INTEGER)); + columns.add(new Column("BUFFER_LENGTH", null, Type.INTEGER)); // Not used + columns.add(new Column("DECIMAL_DIGITS", null, Type.INTEGER)); + columns.add(new Column("NUM_PREC_RADIX", null, Type.INTEGER)); + columns.add(new Column("NULLABLE", null, Type.INTEGER)); + columns.add(new Column("REMARKS", null, Type.KEYWORD)); + columns.add(new Column("COLUMN_DEF", null, Type.KEYWORD)); + columns.add(new Column("SQL_DATA_TYPE", null, Type.INTEGER)); // Not used + columns.add(new Column("SQL_DATETIME_SUB", null, Type.INTEGER)); // Not used + columns.add(new Column("CHAR_OCTET_LENGTH", null, Type.INTEGER)); + columns.add(new Column("ORDINAL_POSITION", null, Type.INTEGER)); + columns.add(new Column("IS_NULLABLE", null, Type.KEYWORD)); + columns.add(new Column("SCOPE_CATALOG", null, Type.KEYWORD)); // Not used + columns.add(new Column("SCOPE_SCHEMA", null, Type.KEYWORD)); // Not 
used + columns.add(new Column("SCOPE_TABLE", null, Type.KEYWORD)); // Not used + columns.add(new Column("SOURCE_DATA_TYPE", null, Type.SHORT)); // Not used + columns.add(new Column("IS_AUTOINCREMENT", null, Type.KEYWORD)); + columns.add(new Column("IS_GENERATEDCOLUMN", null, Type.KEYWORD)); + + return columns; + } + + private List loadRows() { + List rows = new ArrayList<>(); + GetIndexResponse indexResponse = (GetIndexResponse) queryResult; + Map indexMappings = indexResponse.getMappings(); + + // Iterate through indices in indexMappings + for (Entry indexCursor : indexMappings.entrySet()) { + String index = indexCursor.getKey(); + + if (matchesPatternIfRegex(index, statement.getIndexPattern())) { + rows.addAll(loadIndexData(index, indexCursor.getValue().getSourceAsMap())); + } } - - private List loadColumns() { - List columns = new ArrayList<>(); - // Unused Columns are still included in Schema to match JDBC/ODBC standard - columns.add(new Column("TABLE_CAT", null, Type.KEYWORD)); - columns.add(new Column("TABLE_SCHEM", null, Type.KEYWORD)); - columns.add(new Column("TABLE_NAME", null, Type.KEYWORD)); - columns.add(new Column("COLUMN_NAME", null, Type.KEYWORD)); - columns.add(new Column("DATA_TYPE", null, Type.INTEGER)); - columns.add(new Column("TYPE_NAME", null, Type.KEYWORD)); - columns.add(new Column("COLUMN_SIZE", null, Type.INTEGER)); - columns.add(new Column("BUFFER_LENGTH", null, Type.INTEGER)); // Not used - columns.add(new Column("DECIMAL_DIGITS", null, Type.INTEGER)); - columns.add(new Column("NUM_PREC_RADIX", null, Type.INTEGER)); - columns.add(new Column("NULLABLE", null, Type.INTEGER)); - columns.add(new Column("REMARKS", null, Type.KEYWORD)); - columns.add(new Column("COLUMN_DEF", null, Type.KEYWORD)); - columns.add(new Column("SQL_DATA_TYPE", null, Type.INTEGER)); // Not used - columns.add(new Column("SQL_DATETIME_SUB", null, Type.INTEGER)); // Not used - columns.add(new Column("CHAR_OCTET_LENGTH", null, Type.INTEGER)); - columns.add(new 
Column("ORDINAL_POSITION", null, Type.INTEGER)); - columns.add(new Column("IS_NULLABLE", null, Type.KEYWORD)); - columns.add(new Column("SCOPE_CATALOG", null, Type.KEYWORD)); // Not used - columns.add(new Column("SCOPE_SCHEMA", null, Type.KEYWORD)); // Not used - columns.add(new Column("SCOPE_TABLE", null, Type.KEYWORD)); // Not used - columns.add(new Column("SOURCE_DATA_TYPE", null, Type.SHORT)); // Not used - columns.add(new Column("IS_AUTOINCREMENT", null, Type.KEYWORD)); - columns.add(new Column("IS_GENERATEDCOLUMN", null, Type.KEYWORD)); - - return columns; + return rows; + } + + @SuppressWarnings("unchecked") + private List loadIndexData(String index, Map mappingMetadata) { + List rows = new ArrayList<>(); + + Map flattenedMetaData = + flattenMappingMetaData(mappingMetadata, "", new HashMap<>()); + int position = 1; // Used as an arbitrary ORDINAL_POSITION value for the time being + for (Entry entry : flattenedMetaData.entrySet()) { + String columnPattern = statement.getColumnPattern(); + + // Check to see if column name matches pattern, if given + if (columnPattern == null || matchesPattern(entry.getKey(), columnPattern)) { + rows.add(new Row(loadRowData(index, entry.getKey(), entry.getValue(), position))); + position++; + } } - private List loadRows() { - List rows = new ArrayList<>(); - GetIndexResponse indexResponse = (GetIndexResponse) queryResult; - Map indexMappings = indexResponse.getMappings(); - - // Iterate through indices in indexMappings - for (Entry indexCursor : indexMappings.entrySet()) { - String index = indexCursor.getKey(); - - if (matchesPatternIfRegex(index, statement.getIndexPattern())) { - rows.addAll(loadIndexData(index, indexCursor.getValue().getSourceAsMap())); - } - } - return rows; - } - - @SuppressWarnings("unchecked") - private List loadIndexData(String index, Map mappingMetadata) { - List rows = new ArrayList<>(); - - Map flattenedMetaData = flattenMappingMetaData(mappingMetadata, "", new HashMap<>()); - int position = 1; // 
Used as an arbitrary ORDINAL_POSITION value for the time being - for (Entry entry : flattenedMetaData.entrySet()) { - String columnPattern = statement.getColumnPattern(); - - // Check to see if column name matches pattern, if given - if (columnPattern == null || matchesPattern(entry.getKey(), columnPattern)) { - rows.add( - new Row( - loadRowData(index, entry.getKey(), entry.getValue(), position) - ) - ); - position++; - } - } - - return rows; + return rows; + } + + private Map loadRowData(String index, String column, String type, int position) { + Map data = new HashMap<>(); + data.put("TABLE_CAT", clusterName); + data.put("TABLE_NAME", index); + data.put("COLUMN_NAME", column); + data.put("TYPE_NAME", type); + data.put("NUM_PREC_RADIX", DEFAULT_NUM_PREC_RADIX); + data.put("NULLABLE", 2); // TODO Defaulting to 2, need to find a way to check this + data.put("ORDINAL_POSITION", position); // There is no deterministic position of column in table + data.put("IS_NULLABLE", ""); // TODO Defaulting to unknown, need to check this + data.put("IS_AUTOINCREMENT", IS_AUTOINCREMENT); // Defaulting to "NO" + data.put("IS_GENERATEDCOLUMN", ""); // TODO Defaulting to unknown, need to check + + return data; + } + + /** + * To not disrupt old logic, for the time being, ShowQueryAction and DescribeQueryAction are using + * the same 'GetIndexRequestBuilder' that was used in the old ShowQueryAction. Since the format of + * the resulting meta data is different, this method is being used to flatten and retrieve types. + * + *

In the future, should look for a way to generalize this since Schema is currently using + * FieldMappingMetaData whereas here we are using MappingMetaData. + */ + @SuppressWarnings("unchecked") + private Map flattenMappingMetaData( + Map mappingMetaData, String currPath, Map flattenedMapping) { + Map properties = (Map) mappingMetaData.get("properties"); + for (Entry entry : properties.entrySet()) { + Map metaData = (Map) entry.getValue(); + + String fullPath = addToPath(currPath, entry.getKey()); + flattenedMapping.put( + fullPath, (String) metaData.getOrDefault("type", DEFAULT_OBJECT_DATATYPE)); + if (metaData.containsKey("properties")) { + flattenedMapping = flattenMappingMetaData(metaData, fullPath, flattenedMapping); + } } - private Map loadRowData(String index, String column, String type, int position) { - Map data = new HashMap<>(); - data.put("TABLE_CAT", clusterName); - data.put("TABLE_NAME", index); - data.put("COLUMN_NAME", column); - data.put("TYPE_NAME", type); - data.put("NUM_PREC_RADIX", DEFAULT_NUM_PREC_RADIX); - data.put("NULLABLE", 2); // TODO Defaulting to 2, need to find a way to check this - data.put("ORDINAL_POSITION", position); // There is no deterministic position of column in table - data.put("IS_NULLABLE", ""); // TODO Defaulting to unknown, need to check this - data.put("IS_AUTOINCREMENT", IS_AUTOINCREMENT); // Defaulting to "NO" - data.put("IS_GENERATEDCOLUMN", ""); // TODO Defaulting to unknown, need to check - - return data; - } + return flattenedMapping; + } - /** - * To not disrupt old logic, for the time being, ShowQueryAction and DescribeQueryAction are using the same - * 'GetIndexRequestBuilder' that was used in the old ShowQueryAction. Since the format of the resulting meta data - * is different, this method is being used to flatten and retrieve types. - *

- * In the future, should look for a way to generalize this since Schema is currently using FieldMappingMetaData - * whereas here we are using MappingMetaData. - */ - @SuppressWarnings("unchecked") - private Map flattenMappingMetaData(Map mappingMetaData, - String currPath, - Map flattenedMapping) { - Map properties = (Map) mappingMetaData.get("properties"); - for (Entry entry : properties.entrySet()) { - Map metaData = (Map) entry.getValue(); - - String fullPath = addToPath(currPath, entry.getKey()); - flattenedMapping.put(fullPath, (String) metaData.getOrDefault("type", DEFAULT_OBJECT_DATATYPE)); - if (metaData.containsKey("properties")) { - flattenedMapping = flattenMappingMetaData(metaData, fullPath, flattenedMapping); - } - } - - return flattenedMapping; + private String addToPath(String currPath, String field) { + if (currPath.isEmpty()) { + return field; } - private String addToPath(String currPath, String field) { - if (currPath.isEmpty()) { - return field; - } - - return currPath + "." + field; - } + return currPath + "." + field; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java index e8536567dd..f0ffafc470 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import java.io.IOException; @@ -41,219 +40,239 @@ import org.opensearch.sql.legacy.query.join.TableInJoinRequestBuilder; import org.opensearch.sql.legacy.query.planner.HashJoinQueryPlanRequestBuilder; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. 
*/ public abstract class ElasticJoinExecutor implements ElasticHitsExecutor { - private static final Logger LOG = LogManager.getLogger(); - - protected List results; // Keep list to avoid copy to new array in SearchHits - protected MetaSearchResult metaResults; - protected final int MAX_RESULTS_ON_ONE_FETCH = 10000; - private Set aliasesOnReturn; - private boolean allFieldsReturn; - - protected ElasticJoinExecutor(JoinRequestBuilder requestBuilder) { - metaResults = new MetaSearchResult(); - aliasesOnReturn = new HashSet<>(); - List firstTableReturnedField = requestBuilder.getFirstTable().getReturnedFields(); - List secondTableReturnedField = requestBuilder.getSecondTable().getReturnedFields(); - allFieldsReturn = (firstTableReturnedField == null || firstTableReturnedField.size() == 0) - && (secondTableReturnedField == null || secondTableReturnedField.size() == 0); - } - - public void sendResponse(RestChannel channel) throws IOException { - XContentBuilder builder = null; - long len; - try { - builder = ElasticUtils.hitsAsStringResultZeroCopy(results, metaResults, this); - BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, builder); - len = bytesRestResponse.content().length(); - channel.sendResponse(bytesRestResponse); - } catch (IOException e) { - try { - if (builder != null) { - builder.close(); - } - } catch (Exception ex) { - // Ignore. 
Already logged in channel - } - throw e; + private static final Logger LOG = LogManager.getLogger(); + + protected List results; // Keep list to avoid copy to new array in SearchHits + protected MetaSearchResult metaResults; + protected final int MAX_RESULTS_ON_ONE_FETCH = 10000; + private Set aliasesOnReturn; + private boolean allFieldsReturn; + + protected ElasticJoinExecutor(JoinRequestBuilder requestBuilder) { + metaResults = new MetaSearchResult(); + aliasesOnReturn = new HashSet<>(); + List firstTableReturnedField = requestBuilder.getFirstTable().getReturnedFields(); + List secondTableReturnedField = requestBuilder.getSecondTable().getReturnedFields(); + allFieldsReturn = + (firstTableReturnedField == null || firstTableReturnedField.size() == 0) + && (secondTableReturnedField == null || secondTableReturnedField.size() == 0); + } + + public void sendResponse(RestChannel channel) throws IOException { + XContentBuilder builder = null; + long len; + try { + builder = ElasticUtils.hitsAsStringResultZeroCopy(results, metaResults, this); + BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, builder); + len = bytesRestResponse.content().length(); + channel.sendResponse(bytesRestResponse); + } catch (IOException e) { + try { + if (builder != null) { + builder.close(); } - LOG.debug("[MCB] Successfully send response with size of {}. Thread id = {}", len, - Thread.currentThread().getId()); - } - - public void run() throws IOException, SqlParseException { - long timeBefore = System.currentTimeMillis(); - results = innerRun(); - long joinTimeInMilli = System.currentTimeMillis() - timeBefore; - this.metaResults.setTookImMilli(joinTimeInMilli); + } catch (Exception ex) { + // Ignore. 
Already logged in channel + } + throw e; } - - - protected abstract List innerRun() throws IOException, SqlParseException; - - public SearchHits getHits() { - return new SearchHits(results.toArray(new SearchHit[results.size()]), new TotalHits(results.size(), - Relation.EQUAL_TO), 1.0f); + LOG.debug( + "[MCB] Successfully send response with size of {}. Thread id = {}", + len, + Thread.currentThread().getId()); + } + + public void run() throws IOException, SqlParseException { + long timeBefore = System.currentTimeMillis(); + results = innerRun(); + long joinTimeInMilli = System.currentTimeMillis() - timeBefore; + this.metaResults.setTookImMilli(joinTimeInMilli); + } + + protected abstract List innerRun() throws IOException, SqlParseException; + + public SearchHits getHits() { + return new SearchHits( + results.toArray(new SearchHit[results.size()]), + new TotalHits(results.size(), Relation.EQUAL_TO), + 1.0f); + } + + public static ElasticJoinExecutor createJoinExecutor( + Client client, SqlElasticRequestBuilder requestBuilder) { + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + return new QueryPlanElasticExecutor((HashJoinQueryPlanRequestBuilder) requestBuilder); + } else if (requestBuilder instanceof HashJoinElasticRequestBuilder) { + HashJoinElasticRequestBuilder hashJoin = (HashJoinElasticRequestBuilder) requestBuilder; + return new HashJoinElasticExecutor(client, hashJoin); + } else if (requestBuilder instanceof NestedLoopsElasticRequestBuilder) { + NestedLoopsElasticRequestBuilder nestedLoops = + (NestedLoopsElasticRequestBuilder) requestBuilder; + return new NestedLoopsElasticExecutor(client, nestedLoops); + } else { + throw new RuntimeException("Unsuported requestBuilder of type: " + requestBuilder.getClass()); } - - public static ElasticJoinExecutor createJoinExecutor(Client client, SqlElasticRequestBuilder requestBuilder) { - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - return new 
QueryPlanElasticExecutor((HashJoinQueryPlanRequestBuilder) requestBuilder); - } else if (requestBuilder instanceof HashJoinElasticRequestBuilder) { - HashJoinElasticRequestBuilder hashJoin = (HashJoinElasticRequestBuilder) requestBuilder; - return new HashJoinElasticExecutor(client, hashJoin); - } else if (requestBuilder instanceof NestedLoopsElasticRequestBuilder) { - NestedLoopsElasticRequestBuilder nestedLoops = (NestedLoopsElasticRequestBuilder) requestBuilder; - return new NestedLoopsElasticExecutor(client, nestedLoops); - } else { - throw new RuntimeException("Unsuported requestBuilder of type: " + requestBuilder.getClass()); - } + } + + protected void mergeSourceAndAddAliases( + Map secondTableHitSource, + SearchHit searchHit, + String t1Alias, + String t2Alias) { + Map results = mapWithAliases(searchHit.getSourceAsMap(), t1Alias); + results.putAll(mapWithAliases(secondTableHitSource, t2Alias)); + searchHit.getSourceAsMap().clear(); + searchHit.getSourceAsMap().putAll(results); + } + + protected Map mapWithAliases(Map source, String alias) { + Map mapWithAliases = new HashMap<>(); + for (Map.Entry fieldNameToValue : source.entrySet()) { + if (!aliasesOnReturn.contains(fieldNameToValue.getKey())) { + mapWithAliases.put(alias + "." 
+ fieldNameToValue.getKey(), fieldNameToValue.getValue()); + } else { + mapWithAliases.put(fieldNameToValue.getKey(), fieldNameToValue.getValue()); + } } - - protected void mergeSourceAndAddAliases(Map secondTableHitSource, SearchHit searchHit, - String t1Alias, String t2Alias) { - Map results = mapWithAliases(searchHit.getSourceAsMap(), t1Alias); - results.putAll(mapWithAliases(secondTableHitSource, t2Alias)); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(results); + return mapWithAliases; + } + + protected void onlyReturnedFields( + Map fieldsMap, List required, boolean allRequired) { + HashMap filteredMap = new HashMap<>(); + if (allFieldsReturn || allRequired) { + filteredMap.putAll(fieldsMap); + return; } - - protected Map mapWithAliases(Map source, String alias) { - Map mapWithAliases = new HashMap<>(); - for (Map.Entry fieldNameToValue : source.entrySet()) { - if (!aliasesOnReturn.contains(fieldNameToValue.getKey())) { - mapWithAliases.put(alias + "." + fieldNameToValue.getKey(), fieldNameToValue.getValue()); - } else { - mapWithAliases.put(fieldNameToValue.getKey(), fieldNameToValue.getValue()); - } - } - return mapWithAliases; + for (Field field : required) { + String name = field.getName(); + String returnName = name; + String alias = field.getAlias(); + if (alias != null && alias != "") { + returnName = alias; + aliasesOnReturn.add(alias); + } + filteredMap.put(returnName, deepSearchInMap(fieldsMap, name)); } - - protected void onlyReturnedFields(Map fieldsMap, List required, boolean allRequired) { - HashMap filteredMap = new HashMap<>(); - if (allFieldsReturn || allRequired) { - filteredMap.putAll(fieldsMap); - return; + fieldsMap.clear(); + fieldsMap.putAll(filteredMap); + } + + protected Object deepSearchInMap(Map fieldsMap, String name) { + if (name.contains(".")) { + String[] path = name.split("\\."); + Map currentObject = fieldsMap; + for (int i = 0; i < path.length - 1; i++) { + Object valueFromCurrentMap = 
currentObject.get(path[i]); + if (valueFromCurrentMap == null) { + return null; } - for (Field field : required) { - String name = field.getName(); - String returnName = name; - String alias = field.getAlias(); - if (alias != null && alias != "") { - returnName = alias; - aliasesOnReturn.add(alias); - } - filteredMap.put(returnName, deepSearchInMap(fieldsMap, name)); + if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { + return null; } - fieldsMap.clear(); - fieldsMap.putAll(filteredMap); - + currentObject = (Map) valueFromCurrentMap; + } + return currentObject.get(path[path.length - 1]); } - protected Object deepSearchInMap(Map fieldsMap, String name) { - if (name.contains(".")) { - String[] path = name.split("\\."); - Map currentObject = fieldsMap; - for (int i = 0; i < path.length - 1; i++) { - Object valueFromCurrentMap = currentObject.get(path[i]); - if (valueFromCurrentMap == null) { - return null; - } - if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { - return null; - } - currentObject = (Map) valueFromCurrentMap; - } - return currentObject.get(path[path.length - 1]); + return fieldsMap.get(name); + } + + protected void addUnmatchedResults( + List combinedResults, + Collection firstTableSearchHits, + List secondTableReturnedFields, + int currentNumOfIds, + int totalLimit, + String t1Alias, + String t2Alias) { + boolean limitReached = false; + for (SearchHitsResult hitsResult : firstTableSearchHits) { + if (!hitsResult.isMatchedWithOtherTable()) { + for (SearchHit hit : hitsResult.getSearchHits()) { + + // todo: decide which id to put or type. or maby its ok this way. just need to doc. 
+ SearchHit unmachedResult = + createUnmachedResult(secondTableReturnedFields, hit.docId(), t1Alias, t2Alias, hit); + combinedResults.add(unmachedResult); + currentNumOfIds++; + if (currentNumOfIds >= totalLimit) { + limitReached = true; + break; + } } - - return fieldsMap.get(name); + } + if (limitReached) { + break; + } } - - - protected void addUnmatchedResults(List combinedResults, - Collection firstTableSearchHits, - List secondTableReturnedFields, int currentNumOfIds, int totalLimit, - String t1Alias, String t2Alias) { - boolean limitReached = false; - for (SearchHitsResult hitsResult : firstTableSearchHits) { - if (!hitsResult.isMatchedWithOtherTable()) { - for (SearchHit hit : hitsResult.getSearchHits()) { - - //todo: decide which id to put or type. or maby its ok this way. just need to doc. - SearchHit unmachedResult = createUnmachedResult(secondTableReturnedFields, hit.docId(), - t1Alias, t2Alias, hit); - combinedResults.add(unmachedResult); - currentNumOfIds++; - if (currentNumOfIds >= totalLimit) { - limitReached = true; - break; - } - - } - } - if (limitReached) { - break; - } - } + } + + protected SearchHit createUnmachedResult( + List secondTableReturnedFields, + int docId, + String t1Alias, + String t2Alias, + SearchHit hit) { + String unmatchedId = hit.getId() + "|0"; + + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + hit.getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? 
metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = new SearchHit(docId, unmatchedId, documentFields, metaFields); + + searchHit.sourceRef(hit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + searchHit.getSourceAsMap().putAll(hit.getSourceAsMap()); + Map emptySecondTableHitSource = createNullsSource(secondTableReturnedFields); + + mergeSourceAndAddAliases(emptySecondTableHitSource, searchHit, t1Alias, t2Alias); + + return searchHit; + } + + protected Map createNullsSource(List secondTableReturnedFields) { + Map nulledSource = new HashMap<>(); + for (Field field : secondTableReturnedFields) { + if (!field.getName().equals("*")) { + nulledSource.put(field.getName(), null); + } } - - protected SearchHit createUnmachedResult(List secondTableReturnedFields, int docId, String t1Alias, - String t2Alias, SearchHit hit) { - String unmatchedId = hit.getId() + "|0"; - - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - hit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? 
metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(docId, unmatchedId, documentFields, metaFields); - - searchHit.sourceRef(hit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(hit.getSourceAsMap()); - Map emptySecondTableHitSource = createNullsSource(secondTableReturnedFields); - - mergeSourceAndAddAliases(emptySecondTableHitSource, searchHit, t1Alias, t2Alias); - - return searchHit; + return nulledSource; + } + + protected void updateMetaSearchResults(SearchResponse searchResponse) { + this.metaResults.addSuccessfulShards(searchResponse.getSuccessfulShards()); + this.metaResults.addFailedShards(searchResponse.getFailedShards()); + this.metaResults.addTotalNumOfShards(searchResponse.getTotalShards()); + this.metaResults.updateTimeOut(searchResponse.isTimedOut()); + } + + protected SearchResponse scrollOneTimeWithMax( + Client client, TableInJoinRequestBuilder tableRequest) { + SearchRequestBuilder scrollRequest = + tableRequest + .getRequestBuilder() + .setScroll(new TimeValue(60000)) + .setSize(MAX_RESULTS_ON_ONE_FETCH); + boolean ordered = tableRequest.getOriginalSelect().isOrderdSelect(); + if (!ordered) { + scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); } - - protected Map createNullsSource(List secondTableReturnedFields) { - Map nulledSource = new HashMap<>(); - for (Field field : secondTableReturnedFields) { - if (!field.getName().equals("*")) { - nulledSource.put(field.getName(), null); - } - } - return nulledSource; - } - - protected void updateMetaSearchResults(SearchResponse searchResponse) { - this.metaResults.addSuccessfulShards(searchResponse.getSuccessfulShards()); - this.metaResults.addFailedShards(searchResponse.getFailedShards()); - this.metaResults.addTotalNumOfShards(searchResponse.getTotalShards()); - this.metaResults.updateTimeOut(searchResponse.isTimedOut()); - } - - protected SearchResponse scrollOneTimeWithMax(Client client, 
TableInJoinRequestBuilder tableRequest) { - SearchRequestBuilder scrollRequest = tableRequest.getRequestBuilder() - .setScroll(new TimeValue(60000)).setSize(MAX_RESULTS_ON_ONE_FETCH); - boolean ordered = tableRequest.getOriginalSelect().isOrderdSelect(); - if (!ordered) { - scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); - } - SearchResponse responseWithHits = scrollRequest.get(); - //on ordered select - not using SCAN , elastic returns hits on first scroll - //es5.0 elastic always return docs on scan - // if(!ordered) - // responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) - // .setScroll(new TimeValue(600000)).get(); - return responseWithHits; - } - - + SearchResponse responseWithHits = scrollRequest.get(); + // on ordered select - not using SCAN , elastic returns hits on first scroll + // es5.0 elastic always return docs on scan + // if(!ordered) + // responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) + // .setScroll(new TimeValue(600000)).get(); + return responseWithHits; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java index 766ecd3692..fa3514600b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import com.google.common.base.Joiner; @@ -14,72 +13,70 @@ import org.opensearch.search.SearchHit; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 9/9/2016. - */ +/** Created by Eliran on 9/9/2016. 
*/ public class ComperableHitResult { - private SearchHit hit; - private String comperator; - private boolean isAllNull; - private Map flattenMap; + private SearchHit hit; + private String comperator; + private boolean isAllNull; + private Map flattenMap; - public ComperableHitResult(SearchHit hit, String[] fieldsOrder, String seperator) { - this.hit = hit; - Map hitAsMap = hit.getSourceAsMap(); - this.flattenMap = new HashMap<>(); - List results = new ArrayList<>(); - this.isAllNull = true; + public ComperableHitResult(SearchHit hit, String[] fieldsOrder, String seperator) { + this.hit = hit; + Map hitAsMap = hit.getSourceAsMap(); + this.flattenMap = new HashMap<>(); + List results = new ArrayList<>(); + this.isAllNull = true; - for (int i = 0; i < fieldsOrder.length; i++) { - String field = fieldsOrder[i]; - Object result = Util.deepSearchInMap(hitAsMap, field); - if (result == null) { - results.add(""); - } else { - this.isAllNull = false; - results.add(result.toString()); - this.flattenMap.put(field, result); - } - } - this.comperator = Joiner.on(seperator).join(results); + for (int i = 0; i < fieldsOrder.length; i++) { + String field = fieldsOrder[i]; + Object result = Util.deepSearchInMap(hitAsMap, field); + if (result == null) { + results.add(""); + } else { + this.isAllNull = false; + results.add(result.toString()); + this.flattenMap.put(field, result); + } } + this.comperator = Joiner.on(seperator).join(results); + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - ComperableHitResult that = (ComperableHitResult) o; + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } - if (!comperator.equals(that.comperator)) { - return false; - } + ComperableHitResult that = (ComperableHitResult) o; - return true; + if (!comperator.equals(that.comperator)) { + 
return false; } - public boolean isAllNull() { - return isAllNull; - } + return true; + } - @Override - public int hashCode() { - return comperator.hashCode(); - } + public boolean isAllNull() { + return isAllNull; + } - public String getComperator() { - return comperator; - } + @Override + public int hashCode() { + return comperator.hashCode(); + } - public Map getFlattenMap() { - return flattenMap; - } + public String getComperator() { + return comperator; + } - public SearchHit getOriginalHit() { - return hit; - } + public Map getFlattenMap() { + return flattenMap; + } + + public SearchHit getOriginalHit() { + return hit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java index afa6f6c439..c1de63fe88 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import org.opensearch.sql.legacy.expression.core.operator.BinaryScalarOperator; @@ -12,205 +11,130 @@ import org.opensearch.sql.legacy.expression.core.operator.ScalarOperation; import org.opensearch.sql.legacy.expression.core.operator.UnaryScalarOperator; -/** - * The definition of arithmetic function builder factory. - */ +/** The definition of arithmetic function builder factory. 
*/ public class ArithmeticFunctionFactory { - public static ExpressionBuilder add() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.ADD, - Math::addExact, - Math::addExact, - Double::sum, - Float::sum)); - } - - public static ExpressionBuilder subtract() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.ADD, - Math::subtractExact, - Math::subtractExact, - (v1, v2) -> v1 - v2, - (v1, v2) -> v1 - v2)); - } - - public static ExpressionBuilder multiply() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.MULTIPLY, - Math::multiplyExact, - Math::multiplyExact, - (v1, v2) -> v1 * v2, - (v1, v2) -> v1 * v2 - )); - } - - public static ExpressionBuilder divide() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.DIVIDE, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2 - )); - } - - public static ExpressionBuilder modules() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.MODULES, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2 - )); - } - - public static ExpressionBuilder abs() { - return new UnaryExpressionBuilder( - new UnaryScalarOperator( - ScalarOperation.ABS, - Math::abs, - Math::abs, - Math::abs, - Math::abs - )); - } - - public static ExpressionBuilder acos() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.ACOS, - Math::acos - )); - } - - public static ExpressionBuilder asin() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.ASIN, - Math::asin - ) - ); - } - - public static ExpressionBuilder atan() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.ATAN, - Math::atan - ) - ); - } - - public static ExpressionBuilder atan2() { - return new BinaryExpressionBuilder( - new 
DoubleBinaryScalarOperator( - ScalarOperation.ATAN2, - Math::atan2 - ) - ); - } - - public static ExpressionBuilder tan() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.TAN, - Math::tan - ) - ); - } - - public static ExpressionBuilder cbrt() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.CBRT, - Math::cbrt - ) - ); - } - - public static ExpressionBuilder ceil() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.CEIL, - Math::ceil - ) - ); - } - - public static ExpressionBuilder cos() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.COS, - Math::cos - ) - ); - } - - public static ExpressionBuilder cosh() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.COSH, - Math::cosh - ) - ); - } - - public static ExpressionBuilder exp() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.EXP, - Math::exp - ) - ); - } - - public static ExpressionBuilder floor() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.FLOOR, - Math::floor - ) - ); - } - - public static ExpressionBuilder ln() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LN, - Math::log - ) - ); - } - - public static ExpressionBuilder log() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LOG, - Math::log - ) - ); - } - - public static ExpressionBuilder log2() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LOG2, - (x) -> Math.log(x) / Math.log(2d) - ) - ); - } - - public static ExpressionBuilder log10() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LOG10, - Math::log10 - ) - ); - } + public static ExpressionBuilder add() { + return new BinaryExpressionBuilder( + new 
BinaryScalarOperator( + ScalarOperation.ADD, Math::addExact, Math::addExact, Double::sum, Float::sum)); + } + + public static ExpressionBuilder subtract() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.ADD, + Math::subtractExact, + Math::subtractExact, + (v1, v2) -> v1 - v2, + (v1, v2) -> v1 - v2)); + } + + public static ExpressionBuilder multiply() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.MULTIPLY, + Math::multiplyExact, + Math::multiplyExact, + (v1, v2) -> v1 * v2, + (v1, v2) -> v1 * v2)); + } + + public static ExpressionBuilder divide() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.DIVIDE, + (v1, v2) -> v1 / v2, + (v1, v2) -> v1 / v2, + (v1, v2) -> v1 / v2, + (v1, v2) -> v1 / v2)); + } + + public static ExpressionBuilder modules() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.MODULES, + (v1, v2) -> v1 % v2, + (v1, v2) -> v1 % v2, + (v1, v2) -> v1 % v2, + (v1, v2) -> v1 % v2)); + } + + public static ExpressionBuilder abs() { + return new UnaryExpressionBuilder( + new UnaryScalarOperator(ScalarOperation.ABS, Math::abs, Math::abs, Math::abs, Math::abs)); + } + + public static ExpressionBuilder acos() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.ACOS, Math::acos)); + } + + public static ExpressionBuilder asin() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.ASIN, Math::asin)); + } + + public static ExpressionBuilder atan() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.ATAN, Math::atan)); + } + + public static ExpressionBuilder atan2() { + return new BinaryExpressionBuilder( + new DoubleBinaryScalarOperator(ScalarOperation.ATAN2, Math::atan2)); + } + + public static ExpressionBuilder tan() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.TAN, 
Math::tan)); + } + + public static ExpressionBuilder cbrt() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.CBRT, Math::cbrt)); + } + + public static ExpressionBuilder ceil() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.CEIL, Math::ceil)); + } + + public static ExpressionBuilder cos() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.COS, Math::cos)); + } + + public static ExpressionBuilder cosh() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.COSH, Math::cosh)); + } + + public static ExpressionBuilder exp() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.EXP, Math::exp)); + } + + public static ExpressionBuilder floor() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.FLOOR, Math::floor)); + } + + public static ExpressionBuilder ln() { + return new UnaryExpressionBuilder(new DoubleUnaryScalarOperator(ScalarOperation.LN, Math::log)); + } + + public static ExpressionBuilder log() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.LOG, Math::log)); + } + + public static ExpressionBuilder log2() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.LOG2, (x) -> Math.log(x) / Math.log(2d))); + } + + public static ExpressionBuilder log10() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.LOG10, Math::log10)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java index 99ddd50248..fcf08180a5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import java.util.Arrays; @@ -14,33 +13,32 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; -/** - * The definition of the Expression Builder which has two arguments. - */ +/** The definition of the Expression Builder which has two arguments. */ @RequiredArgsConstructor public class BinaryExpressionBuilder implements ExpressionBuilder { - private final ScalarOperator op; + private final ScalarOperator op; - /** - * Build the expression with two {@link Expression} as arguments. - * @param expressionList expression list. - * @return expression. - */ - @Override - public Expression build(List expressionList) { - Expression e1 = expressionList.get(0); - Expression e2 = expressionList.get(1); + /** + * Build the expression with two {@link Expression} as arguments. + * + * @param expressionList expression list. + * @return expression. 
+ */ + @Override + public Expression build(List expressionList) { + Expression e1 = expressionList.get(0); + Expression e2 = expressionList.get(1); - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return op.apply(Arrays.asList(e1.valueOf(tuple), e2.valueOf(tuple))); - } + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return op.apply(Arrays.asList(e1.valueOf(tuple), e2.valueOf(tuple))); + } - @Override - public String toString() { - return String.format("%s(%s,%s)", op.name(), e1, e2); - } - }; - } + @Override + public String toString() { + return String.format("%s(%s,%s)", op.name(), e1, e2); + } + }; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java index 70d47a3e83..02d29e1ed9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValue.ExprValueKind.DOUBLE_VALUE; @@ -24,54 +23,53 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Binary Scalar Operator take two {@link ExprValue} as arguments ans return one {@link ExprValue} as result. + * Binary Scalar Operator take two {@link ExprValue} as arguments ans return one {@link ExprValue} + * as result. 
*/ @RequiredArgsConstructor public class BinaryScalarOperator implements ScalarOperator { - private static final Map numberTypeOrder = - new ImmutableMap.Builder() - .put(INTEGER_VALUE, 0) - .put(LONG_VALUE, 1) - .put(DOUBLE_VALUE, 2) - .put(FLOAT_VALUE, 3) - .build(); + private static final Map numberTypeOrder = + new ImmutableMap.Builder() + .put(INTEGER_VALUE, 0) + .put(LONG_VALUE, 1) + .put(DOUBLE_VALUE, 2) + .put(FLOAT_VALUE, 3) + .build(); - private final ScalarOperation op; - private final BiFunction integerFunc; - private final BiFunction longFunc; - private final BiFunction doubleFunc; - private final BiFunction floatFunc; + private final ScalarOperation op; + private final BiFunction integerFunc; + private final BiFunction longFunc; + private final BiFunction doubleFunc; + private final BiFunction floatFunc; - @Override - public ExprValue apply(List valueList) { - ExprValue v1 = valueList.get(0); - ExprValue v2 = valueList.get(1); - if (!numberTypeOrder.containsKey(v1.kind()) || !numberTypeOrder.containsKey(v2.kind())) { - throw new RuntimeException( - String.format("unexpected operation type: %s(%s, %s) ", op.name(), v1.kind(), v2.kind())); - } - ExprValue.ExprValueKind expectedType = numberTypeOrder.get(v1.kind()) > numberTypeOrder.get(v2.kind()) - ? 
v1.kind() : v2.kind(); - switch (expectedType) { - case DOUBLE_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(v1), getDoubleValue(v2))); - case INTEGER_VALUE: - return ExprValueFactory - .from(integerFunc.apply(getIntegerValue(v1), getIntegerValue(v2))); - case LONG_VALUE: - return ExprValueFactory - .from(longFunc.apply(getLongValue(v1), getLongValue(v2))); - case FLOAT_VALUE: - return ExprValueFactory - .from(floatFunc.apply(getFloatValue(v1), getFloatValue(v2))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s, %s)", op.name(), v1.kind(), - v2.kind())); - } + @Override + public ExprValue apply(List valueList) { + ExprValue v1 = valueList.get(0); + ExprValue v2 = valueList.get(1); + if (!numberTypeOrder.containsKey(v1.kind()) || !numberTypeOrder.containsKey(v2.kind())) { + throw new RuntimeException( + String.format("unexpected operation type: %s(%s, %s) ", op.name(), v1.kind(), v2.kind())); } - - @Override - public String name() { - return op.name(); + ExprValue.ExprValueKind expectedType = + numberTypeOrder.get(v1.kind()) > numberTypeOrder.get(v2.kind()) ? 
v1.kind() : v2.kind(); + switch (expectedType) { + case DOUBLE_VALUE: + return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(v1), getDoubleValue(v2))); + case INTEGER_VALUE: + return ExprValueFactory.from(integerFunc.apply(getIntegerValue(v1), getIntegerValue(v2))); + case LONG_VALUE: + return ExprValueFactory.from(longFunc.apply(getLongValue(v1), getLongValue(v2))); + case FLOAT_VALUE: + return ExprValueFactory.from(floatFunc.apply(getFloatValue(v1), getFloatValue(v2))); + default: + throw new RuntimeException( + String.format( + "unexpected operation type: %s(%s, %s)", op.name(), v1.kind(), v2.kind())); } + } + + @Override + public String name() { + return op.name(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java index 2555b2a53c..12e7aacbaa 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getDoubleValue; @@ -16,37 +15,41 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Double Binary Scalar Operator take two {@link ExprValue} which have double value as arguments ans return one - * {@link ExprDoubleValue} as result. + * Double Binary Scalar Operator take two {@link ExprValue} which have double value as arguments ans + * return one {@link ExprDoubleValue} as result. 
*/ @RequiredArgsConstructor public class DoubleBinaryScalarOperator implements ScalarOperator { - private final ScalarOperation op; - private final BiFunction doubleFunc; + private final ScalarOperation op; + private final BiFunction doubleFunc; - @Override - public ExprValue apply(List exprValues) { - ExprValue exprValue1 = exprValues.get(0); - ExprValue exprValue2 = exprValues.get(1); - if (exprValue1.kind() != exprValue2.kind()) { - throw new RuntimeException(String.format("unexpected operation type: %s(%s,%s)", op.name(), - exprValue1.kind(), exprValue2.kind())); - } - switch (exprValue1.kind()) { - case DOUBLE_VALUE: - case INTEGER_VALUE: - case LONG_VALUE: - case FLOAT_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue1), - getDoubleValue(exprValue2))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s,%s)", op.name(), - exprValue1.kind(), exprValue2.kind())); - } + @Override + public ExprValue apply(List exprValues) { + ExprValue exprValue1 = exprValues.get(0); + ExprValue exprValue2 = exprValues.get(1); + if (exprValue1.kind() != exprValue2.kind()) { + throw new RuntimeException( + String.format( + "unexpected operation type: %s(%s,%s)", + op.name(), exprValue1.kind(), exprValue2.kind())); } - - @Override - public String name() { - return op.name(); + switch (exprValue1.kind()) { + case DOUBLE_VALUE: + case INTEGER_VALUE: + case LONG_VALUE: + case FLOAT_VALUE: + return ExprValueFactory.from( + doubleFunc.apply(getDoubleValue(exprValue1), getDoubleValue(exprValue2))); + default: + throw new RuntimeException( + String.format( + "unexpected operation type: %s(%s,%s)", + op.name(), exprValue1.kind(), exprValue2.kind())); } + } + + @Override + public String name() { + return op.name(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java index 736216472f..8242eee8a6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getDoubleValue; @@ -16,31 +15,31 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Unary Binary Scalar Operator take one {@link ExprValue} which have double value as arguments ans return one - * {@link ExprDoubleValue} as result. + * Unary Binary Scalar Operator take one {@link ExprValue} which have double value as arguments ans + * return one {@link ExprDoubleValue} as result. */ @RequiredArgsConstructor public class DoubleUnaryScalarOperator implements ScalarOperator { - private final ScalarOperation op; - private final Function doubleFunc; + private final ScalarOperation op; + private final Function doubleFunc; - @Override - public ExprValue apply(List exprValues) { - ExprValue exprValue = exprValues.get(0); - switch (exprValue.kind()) { - case DOUBLE_VALUE: - case INTEGER_VALUE: - case LONG_VALUE: - case FLOAT_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s)", - op.name(), exprValue.kind())); - } + @Override + public ExprValue apply(List exprValues) { + ExprValue exprValue = exprValues.get(0); + switch (exprValue.kind()) { + case DOUBLE_VALUE: + case INTEGER_VALUE: + case LONG_VALUE: + case FLOAT_VALUE: + return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue))); + default: + throw new RuntimeException( + String.format("unexpected operation type: %s(%s)", 
op.name(), exprValue.kind())); } + } - @Override - public String name() { - return op.name(); - } + @Override + public String name() { + return op.name(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java index badc7c8355..328f63b7ca 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.domain; - import java.util.Map; import java.util.stream.Collectors; import lombok.Builder; @@ -19,42 +17,41 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * BindingTuple represents the a relationship between bindingName and ExprValue. - * e.g. The operation output column name is bindingName, the value is the ExprValue. + * BindingTuple represents the a relationship between bindingName and ExprValue. e.g. The operation + * output column name is bindingName, the value is the ExprValue. */ @Builder @Getter @EqualsAndHashCode public class BindingTuple { - @Singular("binding") - private final Map bindingMap; - - /** - * Resolve the Binding Name in BindingTuple context. - * - * @param bindingName binding name. - * @return binding value. 
- */ - public ExprValue resolve(String bindingName) { - return bindingMap.getOrDefault(bindingName, new ExprMissingValue()); - } - - @Override - public String toString() { - return bindingMap.entrySet() - .stream() - .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) - .collect(Collectors.joining(",", "<", ">")); - } - - public static BindingTuple from(Map map) { - return from(new JSONObject(map)); - } - - public static BindingTuple from(JSONObject json) { - Map map = json.toMap(); - BindingTupleBuilder bindingTupleBuilder = BindingTuple.builder(); - map.forEach((key, value) -> bindingTupleBuilder.binding(key, ExprValueFactory.from(value))); - return bindingTupleBuilder.build(); - } + @Singular("binding") + private final Map bindingMap; + + /** + * Resolve the Binding Name in BindingTuple context. + * + * @param bindingName binding name. + * @return binding value. + */ + public ExprValue resolve(String bindingName) { + return bindingMap.getOrDefault(bindingName, new ExprMissingValue()); + } + + @Override + public String toString() { + return bindingMap.entrySet().stream() + .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) + .collect(Collectors.joining(",", "<", ">")); + } + + public static BindingTuple from(Map map) { + return from(new JSONObject(map)); + } + + public static BindingTuple from(JSONObject json) { + Map map = json.toMap(); + BindingTupleBuilder bindingTupleBuilder = BindingTuple.builder(); + map.forEach((key, value) -> bindingTupleBuilder.binding(key, ExprValueFactory.from(value))); + return bindingTupleBuilder.build(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java index 8bb15eeb74..88d5f817e8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java @@ -3,32 +3,31 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.util.concurrent.atomic.LongAdder; public class BasicCounter implements Counter { - private LongAdder count = new LongAdder(); + private LongAdder count = new LongAdder(); - @Override - public void increment() { - count.increment(); - } + @Override + public void increment() { + count.increment(); + } - @Override - public void add(long n) { - count.add(n); - } + @Override + public void add(long n) { + count.add(n); + } - @Override - public Long getValue() { - return count.longValue(); - } + @Override + public Long getValue() { + return count.longValue(); + } - @Override - public void reset() { - count.reset(); - } + @Override + public void reset() { + count.reset(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java index 7d490704e8..f91731ab0e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java @@ -3,16 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; public interface Counter { - void increment(); + void increment(); - void add(long n); + void add(long n); - T getValue(); + T getValue(); - void reset(); + void reset(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java index c711ee2929..d55ee64601 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -19,101 +18,119 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import 
org.opensearch.sql.legacy.utils.Util; -/** - * Created by allwefantasy on 9/3/16. - */ +/** Created by allwefantasy on 9/3/16. */ public class CaseWhenParser { - private SQLCaseExpr caseExpr; - private String alias; - private String tableAlias; - - public CaseWhenParser(SQLCaseExpr caseExpr, String alias, String tableAlias) { - this.alias = alias; - this.tableAlias = tableAlias; - this.caseExpr = caseExpr; + private SQLCaseExpr caseExpr; + private String alias; + private String tableAlias; + + public CaseWhenParser(SQLCaseExpr caseExpr, String alias, String tableAlias) { + this.alias = alias; + this.tableAlias = tableAlias; + this.caseExpr = caseExpr; + } + + public String parse() throws SqlParseException { + List result = new ArrayList<>(); + + if (caseExpr.getValueExpr() != null) { + for (SQLCaseExpr.Item item : caseExpr.getItems()) { + SQLExpr left = caseExpr.getValueExpr(); + SQLExpr right = item.getConditionExpr(); + SQLBinaryOpExpr conditionExpr = + new SQLBinaryOpExpr(left, SQLBinaryOperator.Equality, right); + item.setConditionExpr(conditionExpr); + } + caseExpr.setValueExpr(null); } - public String parse() throws SqlParseException { - List result = new ArrayList<>(); - - if (caseExpr.getValueExpr() != null) { - for (SQLCaseExpr.Item item : caseExpr.getItems()) { - SQLExpr left = caseExpr.getValueExpr(); - SQLExpr right = item.getConditionExpr(); - SQLBinaryOpExpr conditionExpr = new SQLBinaryOpExpr(left, SQLBinaryOperator.Equality, right); - item.setConditionExpr(conditionExpr); - } - caseExpr.setValueExpr(null); - } - - for (SQLCaseExpr.Item item : caseExpr.getItems()) { - SQLExpr conditionExpr = item.getConditionExpr(); - - WhereParser parser = new WhereParser(new SqlParser(), conditionExpr); - String scriptCode = explain(parser.findWhere()); - if (scriptCode.startsWith(" &&")) { - scriptCode = scriptCode.substring(3); - } - if (result.size() == 0) { - result.add("if(" + scriptCode + ")" + "{" + Util.getScriptValueWithQuote(item.getValueExpr(), - "'") + 
"}"); - } else { - result.add("else if(" + scriptCode + ")" + "{" + Util.getScriptValueWithQuote(item.getValueExpr(), - "'") + "}"); - } - - } - SQLExpr elseExpr = caseExpr.getElseExpr(); - if (elseExpr == null) { - result.add("else { null }"); - } else { - result.add("else {" + Util.getScriptValueWithQuote(elseExpr, "'") + "}"); - } - - - return Joiner.on(" ").join(result); + for (SQLCaseExpr.Item item : caseExpr.getItems()) { + SQLExpr conditionExpr = item.getConditionExpr(); + + WhereParser parser = new WhereParser(new SqlParser(), conditionExpr); + String scriptCode = explain(parser.findWhere()); + if (scriptCode.startsWith(" &&")) { + scriptCode = scriptCode.substring(3); + } + if (result.size() == 0) { + result.add( + "if(" + + scriptCode + + ")" + + "{" + + Util.getScriptValueWithQuote(item.getValueExpr(), "'") + + "}"); + } else { + result.add( + "else if(" + + scriptCode + + ")" + + "{" + + Util.getScriptValueWithQuote(item.getValueExpr(), "'") + + "}"); + } } - - public String explain(Where where) throws SqlParseException { - List codes = new ArrayList<>(); - while (where.getWheres().size() == 1) { - where = where.getWheres().getFirst(); - } - explainWhere(codes, where); - String relation = where.getConn().name().equals("AND") ? 
" && " : " || "; - return Joiner.on(relation).join(codes); + SQLExpr elseExpr = caseExpr.getElseExpr(); + if (elseExpr == null) { + result.add("else { null }"); + } else { + result.add("else {" + Util.getScriptValueWithQuote(elseExpr, "'") + "}"); } + return Joiner.on(" ").join(result); + } - private void explainWhere(List codes, Where where) throws SqlParseException { - if (where instanceof Condition) { - Condition condition = (Condition) where; - - if (condition.getValue() instanceof ScriptFilter) { - codes.add("(" + ((ScriptFilter) condition.getValue()).getScript() + ")"); - } else if (condition.getOPERATOR() == Condition.OPERATOR.BETWEEN) { - Object[] objs = (Object[]) condition.getValue(); - codes.add("(" + "doc['" + condition.getName() + "'].value >= " + objs[0] + " && doc['" - + condition.getName() + "'].value <=" + objs[1] + ")"); - } else { - SQLExpr nameExpr = condition.getNameExpr(); - SQLExpr valueExpr = condition.getValueExpr(); - if (valueExpr instanceof SQLNullExpr) { - codes.add("(" + "doc['" + nameExpr.toString() + "']" + ".empty)"); - } else { - codes.add("(" + Util.getScriptValueWithQuote(nameExpr, "'") + condition.getOpertatorSymbol() - + Util.getScriptValueWithQuote(valueExpr, "'") + ")"); - } - } + public String explain(Where where) throws SqlParseException { + List codes = new ArrayList<>(); + while (where.getWheres().size() == 1) { + where = where.getWheres().getFirst(); + } + explainWhere(codes, where); + String relation = where.getConn().name().equals("AND") ? 
" && " : " || "; + return Joiner.on(relation).join(codes); + } + + private void explainWhere(List codes, Where where) throws SqlParseException { + if (where instanceof Condition) { + Condition condition = (Condition) where; + + if (condition.getValue() instanceof ScriptFilter) { + codes.add("(" + ((ScriptFilter) condition.getValue()).getScript() + ")"); + } else if (condition.getOPERATOR() == Condition.OPERATOR.BETWEEN) { + Object[] objs = (Object[]) condition.getValue(); + codes.add( + "(" + + "doc['" + + condition.getName() + + "'].value >= " + + objs[0] + + " && doc['" + + condition.getName() + + "'].value <=" + + objs[1] + + ")"); + } else { + SQLExpr nameExpr = condition.getNameExpr(); + SQLExpr valueExpr = condition.getValueExpr(); + if (valueExpr instanceof SQLNullExpr) { + codes.add("(" + "doc['" + nameExpr.toString() + "']" + ".empty)"); } else { - for (Where subWhere : where.getWheres()) { - List subCodes = new ArrayList<>(); - explainWhere(subCodes, subWhere); - String relation = subWhere.getConn().name().equals("AND") ? "&&" : "||"; - codes.add(Joiner.on(relation).join(subCodes)); - } + codes.add( + "(" + + Util.getScriptValueWithQuote(nameExpr, "'") + + condition.getOpertatorSymbol() + + Util.getScriptValueWithQuote(valueExpr, "'") + + ")"); } + } + } else { + for (Where subWhere : where.getWheres()) { + List subCodes = new ArrayList<>(); + explainWhere(subCodes, subWhere); + String relation = subWhere.getConn().name().equals("AND") ? 
"&&" : "||"; + codes.add(Joiner.on(relation).join(subCodes)); + } } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java index 74945cb94f..27374849df 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -16,56 +15,55 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Razma Tazz on 14/04/2016. - */ +/** Created by Razma Tazz on 14/04/2016. */ public class ChildrenType { - public String field; - public String childType; - public Where where; - private boolean simple; + public String field; + public String childType; + public Where where; + private boolean simple; - public boolean tryFillFromExpr(SQLExpr expr) throws SqlParseException { - if (!(expr instanceof SQLMethodInvokeExpr)) { - return false; - } - SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) expr; - - String methodName = method.getMethodName(); + public boolean tryFillFromExpr(SQLExpr expr) throws SqlParseException { + if (!(expr instanceof SQLMethodInvokeExpr)) { + return false; + } + SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) expr; - if (!methodName.toLowerCase().equals("children")) { - return false; - } + String methodName = method.getMethodName(); - List parameters = method.getParameters(); + if (!methodName.toLowerCase().equals("children")) { + return false; + } - if (parameters.size() != 2) { - throw new SqlParseException( - "on children object only allowed 2 parameters (type, field)/(type, conditions...) 
"); - } + List parameters = method.getParameters(); - String type = Util.extendedToString(parameters.get(0)); - this.childType = type; + if (parameters.size() != 2) { + throw new SqlParseException( + "on children object only allowed 2 parameters (type, field)/(type, conditions...) "); + } - SQLExpr secondParameter = parameters.get(1); - if (secondParameter instanceof SQLTextLiteralExpr || secondParameter instanceof SQLIdentifierExpr - || secondParameter instanceof SQLPropertyExpr) { - this.field = Util.extendedToString(secondParameter); - this.simple = true; - } else { - Where where = Where.newInstance(); - new WhereParser(new SqlParser()).parseWhere(secondParameter, where); - if (where.getWheres().size() == 0) { - throw new SqlParseException("Failed to parse filter condition"); - } - this.where = where; - simple = false; - } + String type = Util.extendedToString(parameters.get(0)); + this.childType = type; - return true; + SQLExpr secondParameter = parameters.get(1); + if (secondParameter instanceof SQLTextLiteralExpr + || secondParameter instanceof SQLIdentifierExpr + || secondParameter instanceof SQLPropertyExpr) { + this.field = Util.extendedToString(secondParameter); + this.simple = true; + } else { + Where where = Where.newInstance(); + new WhereParser(new SqlParser()).parseWhere(secondParameter, where); + if (where.getWheres().size() == 0) { + throw new SqlParseException("Failed to parse filter condition"); + } + this.where = where; + simple = false; } - public boolean isSimple() { - return simple; - } + return true; + } + + public boolean isSimple() { + return simple; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java index 8720c3ba85..67b49fb4ad 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import static com.alibaba.druid.sql.parser.CharTypes.isFirstIdentifierChar; @@ -14,86 +13,82 @@ import com.alibaba.druid.sql.parser.ParserException; import com.alibaba.druid.sql.parser.Token; -/** - * Created by Eliran on 18/8/2015. - */ +/** Created by Eliran on 18/8/2015. */ public class ElasticLexer extends MySqlLexer { - public ElasticLexer(String input) { - super(input); - } + public ElasticLexer(String input) { + super(input); + } + public ElasticLexer(char[] input, int inputLength, boolean skipComment) { + super(input, inputLength, skipComment); + } - public ElasticLexer(char[] input, int inputLength, boolean skipComment) { - super(input, inputLength, skipComment); - } + public void scanIdentifier() { + final char first = ch; + + if (ch == '`') { - public void scanIdentifier() { - final char first = ch; + mark = pos; + bufPos = 1; + char ch; + for (; ; ) { + ch = charAt(++pos); if (ch == '`') { + bufPos++; + ch = charAt(++pos); + break; + } else if (ch == EOI) { + throw new ParserException("illegal identifier"); + } - mark = pos; - bufPos = 1; - char ch; - for (; ; ) { - ch = charAt(++pos); - - if (ch == '`') { - bufPos++; - ch = charAt(++pos); - break; - } else if (ch == EOI) { - throw new ParserException("illegal identifier"); - } - - bufPos++; - continue; - } - - this.ch = charAt(pos); - - stringVal = subString(mark, bufPos); - Token tok = keywods.getKeyword(stringVal); - if (tok != null) { - token = tok; - } else { - token = Token.IDENTIFIER; - } - } else { - - final boolean firstFlag = isFirstIdentifierChar(first); - if (!firstFlag) { - throw new ParserException("illegal identifier"); - } - - mark = pos; - bufPos = 1; - char ch; - for (; ; ) { - ch = charAt(++pos); - - if (!isElasticIdentifierChar(ch)) { - break; - } - - bufPos++; - continue; - } - - this.ch = charAt(pos); - - stringVal = addSymbol(); - Token tok = keywods.getKeyword(stringVal); - if (tok != null) { - 
token = tok; - } else { - token = Token.IDENTIFIER; - } + bufPos++; + continue; + } + + this.ch = charAt(pos); + + stringVal = subString(mark, bufPos); + Token tok = keywods.getKeyword(stringVal); + if (tok != null) { + token = tok; + } else { + token = Token.IDENTIFIER; + } + } else { + + final boolean firstFlag = isFirstIdentifierChar(first); + if (!firstFlag) { + throw new ParserException("illegal identifier"); + } + + mark = pos; + bufPos = 1; + char ch; + for (; ; ) { + ch = charAt(++pos); + + if (!isElasticIdentifierChar(ch)) { + break; } - } + bufPos++; + continue; + } - private boolean isElasticIdentifierChar(char ch) { - return ch == '*' || ch == ':' || ch == '-' || ch == '.' || ch == ';' || isIdentifierChar(ch); + this.ch = charAt(pos); + + stringVal = addSymbol(); + Token tok = keywods.getKeyword(stringVal); + if (tok != null) { + token = tok; + } else { + token = Token.IDENTIFIER; + } } + } + + private boolean isElasticIdentifierChar(char ch) { + return ch == '*' || ch == ':' || ch == '-' || ch == '.' 
|| ch == ';' || isIdentifierChar(ch); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java index 24194e8de5..57af269001 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import com.alibaba.druid.sql.ast.SQLExpr; @@ -38,457 +37,462 @@ import org.opensearch.sql.legacy.query.maker.AggMaker; import org.opensearch.sql.legacy.query.maker.QueryMaker; -/** - * Transform SQL query to OpenSearch aggregations query - */ +/** Transform SQL query to OpenSearch aggregations query */ public class AggregationQueryAction extends QueryAction { - private final Select select; - private AggMaker aggMaker = new AggMaker(); - private SearchRequestBuilder request; - - public AggregationQueryAction(Client client, Select select) { - super(client, select); - this.select = select; - } - - @Override - public SqlOpenSearchRequestBuilder explain() throws SqlParseException { - this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); - - if (select.getRowCount() == null) { - select.setRowCount(Select.DEFAULT_LIMIT); - } - - setIndicesAndTypes(); - - setWhere(select.getWhere()); - AggregationBuilder lastAgg = null; - - for (List groupBy : select.getGroupBys()) { - if (!groupBy.isEmpty()) { - Field field = groupBy.get(0); - - //make groupby can reference to field alias - lastAgg = getGroupAgg(field, select); - - if (lastAgg instanceof TermsAggregationBuilder) { - - // TODO: Consider removing that condition - // in theory we should be able to apply this for all types of fields, but - // this change requires too much of related integration tests (e.g. 
there are comparisons against - // raw javascript dsl, so I'd like to scope the changes as of now to one particular fix for - // scripted functions - - // the condition `field.getName().equals("script")` is to include the CAST cases, since the cast - // method is instance of MethodField with script. => corrects the shard size of CASTs - if (!(field instanceof MethodField) || field instanceof ScriptMethodField - || field.getName().equals("script")) { - //if limit size is too small, increasing shard size is required - if (select.getRowCount() < 200) { - ((TermsAggregationBuilder) lastAgg).shardSize(2000); - for (Hint hint : select.getHints()) { - if (hint.getType() == HintType.SHARD_SIZE) { - if (hint.getParams() != null && hint.getParams().length != 0 - && hint.getParams()[0] != null) { - ((TermsAggregationBuilder) lastAgg).shardSize((Integer) hint.getParams()[0]); - } - } - } - } - - if (select.getRowCount() > 0) { - ((TermsAggregationBuilder) lastAgg).size(select.getRowCount()); - } - } - } - - if (field.isNested()) { - AggregationBuilder nestedBuilder = createNestedAggregation(field); - - if (insertFilterIfExistsAfter(lastAgg, groupBy, nestedBuilder, 1)) { - groupBy.remove(1); - } else { - nestedBuilder.subAggregation(lastAgg); - } + private final Select select; + private AggMaker aggMaker = new AggMaker(); + private SearchRequestBuilder request; - request.addAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); - } else if (field.isChildren()) { - AggregationBuilder childrenBuilder = createChildrenAggregation(field); + public AggregationQueryAction(Client client, Select select) { + super(client, select); + this.select = select; + } - if (insertFilterIfExistsAfter(lastAgg, groupBy, childrenBuilder, 1)) { - groupBy.remove(1); - } else { - childrenBuilder.subAggregation(lastAgg); - } + @Override + public SqlOpenSearchRequestBuilder explain() throws SqlParseException { + this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); - 
request.addAggregation(childrenBuilder); - } else { - request.addAggregation(lastAgg); - } + if (select.getRowCount() == null) { + select.setRowCount(Select.DEFAULT_LIMIT); + } - for (int i = 1; i < groupBy.size(); i++) { - field = groupBy.get(i); - AggregationBuilder subAgg = getGroupAgg(field, select); - //ES5.0 termsaggregation with size = 0 not supported anymore -// if (subAgg instanceof TermsAggregationBuilder && !(field instanceof MethodField)) { - -// //((TermsAggregationBuilder) subAgg).size(0); -// } - - if (field.isNested()) { - AggregationBuilder nestedBuilder = createNestedAggregation(field); - - if (insertFilterIfExistsAfter(subAgg, groupBy, nestedBuilder, i + 1)) { - groupBy.remove(i + 1); - i++; - } else { - nestedBuilder.subAggregation(subAgg); - } - - lastAgg.subAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); - } else if (field.isChildren()) { - AggregationBuilder childrenBuilder = createChildrenAggregation(field); - - if (insertFilterIfExistsAfter(subAgg, groupBy, childrenBuilder, i + 1)) { - groupBy.remove(i + 1); - i++; - } else { - childrenBuilder.subAggregation(subAgg); - } - - lastAgg.subAggregation(childrenBuilder); - } else { - lastAgg.subAggregation(subAgg); - } - - lastAgg = subAgg; + setIndicesAndTypes(); + + setWhere(select.getWhere()); + AggregationBuilder lastAgg = null; + + for (List groupBy : select.getGroupBys()) { + if (!groupBy.isEmpty()) { + Field field = groupBy.get(0); + + // make groupby can reference to field alias + lastAgg = getGroupAgg(field, select); + + if (lastAgg instanceof TermsAggregationBuilder) { + + // TODO: Consider removing that condition + // in theory we should be able to apply this for all types of fields, but + // this change requires too much of related integration tests (e.g. 
there are comparisons + // against + // raw javascript dsl, so I'd like to scope the changes as of now to one particular fix + // for + // scripted functions + + // the condition `field.getName().equals("script")` is to include the CAST cases, since + // the cast + // method is instance of MethodField with script. => corrects the shard size of CASTs + if (!(field instanceof MethodField) + || field instanceof ScriptMethodField + || field.getName().equals("script")) { + // if limit size is too small, increasing shard size is required + if (select.getRowCount() < 200) { + ((TermsAggregationBuilder) lastAgg).shardSize(2000); + for (Hint hint : select.getHints()) { + if (hint.getType() == HintType.SHARD_SIZE) { + if (hint.getParams() != null + && hint.getParams().length != 0 + && hint.getParams()[0] != null) { + ((TermsAggregationBuilder) lastAgg).shardSize((Integer) hint.getParams()[0]); + } } + } } - // explain the field from SELECT and HAVING clause - List combinedList = new ArrayList<>(); - combinedList.addAll(select.getFields()); - if (select.getHaving() != null) { - combinedList.addAll(select.getHaving().getHavingFields()); + if (select.getRowCount() > 0) { + ((TermsAggregationBuilder) lastAgg).size(select.getRowCount()); } - // add aggregation function to each groupBy - explanFields(request, combinedList, lastAgg); - - explainHaving(lastAgg); + } } - if (select.getGroupBys().size() < 1) { - //add aggregation when having no groupBy script - explanFields(request, select.getFields(), lastAgg); + if (field.isNested()) { + AggregationBuilder nestedBuilder = createNestedAggregation(field); - } + if (insertFilterIfExistsAfter(lastAgg, groupBy, nestedBuilder, 1)) { + groupBy.remove(1); + } else { + nestedBuilder.subAggregation(lastAgg); + } - Map groupMap = aggMaker.getGroupMap(); - // add field - if (select.getFields().size() > 0) { - setFields(select.getFields()); -// explanFields(request, select.getFields(), lastAgg); - } + 
request.addAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); + } else if (field.isChildren()) { + AggregationBuilder childrenBuilder = createChildrenAggregation(field); - // add order - if (lastAgg != null && select.getOrderBys().size() > 0) { - for (Order order : select.getOrderBys()) { - - // check "standard" fields - KVValue temp = groupMap.get(order.getName()); - if (temp != null) { - TermsAggregationBuilder termsBuilder = (TermsAggregationBuilder) temp.value; - switch (temp.key) { - case "COUNT": - termsBuilder.order(BucketOrder.count(isASC(order))); - break; - case "KEY": - termsBuilder.order(BucketOrder.key(isASC(order))); - break; - case "FIELD": - termsBuilder.order(BucketOrder.aggregation(order.getName(), isASC(order))); - break; - default: - throw new SqlParseException(order.getName() + " can not to order"); - } - } else if (order.isScript()) { - // Do not add scripted fields into sort, they must be sorted inside of aggregation - } else { - // TODO: Is there a legit case when we want to add field into sort for aggregation queries? 
- request.addSort(order.getName(), SortOrder.valueOf(order.getType())); - } - } + if (insertFilterIfExistsAfter(lastAgg, groupBy, childrenBuilder, 1)) { + groupBy.remove(1); + } else { + childrenBuilder.subAggregation(lastAgg); + } + + request.addAggregation(childrenBuilder); + } else { + request.addAggregation(lastAgg); } - setLimitFromHint(this.select.getHints()); + for (int i = 1; i < groupBy.size(); i++) { + field = groupBy.get(i); + AggregationBuilder subAgg = getGroupAgg(field, select); + // ES5.0 termsaggregation with size = 0 not supported anymore + // if (subAgg instanceof TermsAggregationBuilder && !(field instanceof + // MethodField)) { - request.setSearchType(SearchType.DEFAULT); - updateRequestWithIndexAndRoutingOptions(select, request); - updateRequestWithHighlight(select, request); - updateRequestWithCollapse(select, request); - updateRequestWithPostFilter(select, request); - return new SqlOpenSearchRequestBuilder(request); - } + // //((TermsAggregationBuilder) subAgg).size(0); + // } - private AggregationBuilder getGroupAgg(Field groupByField, Select select) throws SqlParseException { - AggregationBuilder lastAgg = null; - Field shadowField = null; - - for (Field selectField : select.getFields()) { - if (selectField instanceof MethodField && selectField.getName().equals("script")) { - MethodField scriptField = (MethodField) selectField; - for (KVValue kv : scriptField.getParams()) { - if (kv.value.equals(groupByField.getName())) { - shadowField = scriptField; - break; - } - } - } - } + if (field.isNested()) { + AggregationBuilder nestedBuilder = createNestedAggregation(field); - if (shadowField == null) { - for (Field selectField: select.getFields()) { - if (selectField.getAlias() != null - && (groupByField.getName().equals(selectField.getAlias()) - || groupByField.getExpression().equals(selectField.getExpression()))) { - shadowField = selectField; - } + if (insertFilterIfExistsAfter(subAgg, groupBy, nestedBuilder, i + 1)) { + groupBy.remove(i + 1); 
+ i++; + } else { + nestedBuilder.subAggregation(subAgg); } - } + lastAgg.subAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); + } else if (field.isChildren()) { + AggregationBuilder childrenBuilder = createChildrenAggregation(field); - if (null != shadowField) { - groupByField.setAlias(shadowField.getAlias()); - groupByField = shadowField; - } + if (insertFilterIfExistsAfter(subAgg, groupBy, childrenBuilder, i + 1)) { + groupBy.remove(i + 1); + i++; + } else { + childrenBuilder.subAggregation(subAgg); + } - lastAgg = aggMaker.makeGroupAgg(groupByField); + lastAgg.subAggregation(childrenBuilder); + } else { + lastAgg.subAggregation(subAgg); + } - // find if we have order for that aggregation. As of now only special case for script fields - if (groupByField.isScriptField()) { - addOrderByScriptFieldIfPresent(select, (TermsAggregationBuilder) lastAgg, groupByField.getExpression()); + lastAgg = subAgg; } + } + + // explain the field from SELECT and HAVING clause + List combinedList = new ArrayList<>(); + combinedList.addAll(select.getFields()); + if (select.getHaving() != null) { + combinedList.addAll(select.getHaving().getHavingFields()); + } + // add aggregation function to each groupBy + explanFields(request, combinedList, lastAgg); + + explainHaving(lastAgg); + } - return lastAgg; + if (select.getGroupBys().size() < 1) { + // add aggregation when having no groupBy script + explanFields(request, select.getFields(), lastAgg); } - private void addOrderByScriptFieldIfPresent(Select select, TermsAggregationBuilder groupByAggregation, - SQLExpr groupByExpression) { - // TODO: Explore other ways to correlate different fields/functions in the query (params?) - // This feels like a hacky way, but it's the best that could be done now. 
- select - .getOrderBys() - .stream() - .filter(order -> groupByExpression.equals(order.getSortField().getExpression())) - .findFirst() - .ifPresent(orderForGroupBy -> groupByAggregation.order(BucketOrder.key(isASC(orderForGroupBy)))); + Map groupMap = aggMaker.getGroupMap(); + // add field + if (select.getFields().size() > 0) { + setFields(select.getFields()); + // explanFields(request, select.getFields(), lastAgg); } - private AggregationBuilder wrapNestedIfNeeded(AggregationBuilder nestedBuilder, boolean reverseNested) { - if (!reverseNested) { - return nestedBuilder; + // add order + if (lastAgg != null && select.getOrderBys().size() > 0) { + for (Order order : select.getOrderBys()) { + + // check "standard" fields + KVValue temp = groupMap.get(order.getName()); + if (temp != null) { + TermsAggregationBuilder termsBuilder = (TermsAggregationBuilder) temp.value; + switch (temp.key) { + case "COUNT": + termsBuilder.order(BucketOrder.count(isASC(order))); + break; + case "KEY": + termsBuilder.order(BucketOrder.key(isASC(order))); + break; + case "FIELD": + termsBuilder.order(BucketOrder.aggregation(order.getName(), isASC(order))); + break; + default: + throw new SqlParseException(order.getName() + " can not to order"); + } + } else if (order.isScript()) { + // Do not add scripted fields into sort, they must be sorted inside of aggregation + } else { + // TODO: Is there a legit case when we want to add field into sort for aggregation + // queries? 
+ request.addSort(order.getName(), SortOrder.valueOf(order.getType())); } - if (reverseNested && !(nestedBuilder instanceof NestedAggregationBuilder)) { - return nestedBuilder; + } + } + + setLimitFromHint(this.select.getHints()); + + request.setSearchType(SearchType.DEFAULT); + updateRequestWithIndexAndRoutingOptions(select, request); + updateRequestWithHighlight(select, request); + updateRequestWithCollapse(select, request); + updateRequestWithPostFilter(select, request); + return new SqlOpenSearchRequestBuilder(request); + } + + private AggregationBuilder getGroupAgg(Field groupByField, Select select) + throws SqlParseException { + AggregationBuilder lastAgg = null; + Field shadowField = null; + + for (Field selectField : select.getFields()) { + if (selectField instanceof MethodField && selectField.getName().equals("script")) { + MethodField scriptField = (MethodField) selectField; + for (KVValue kv : scriptField.getParams()) { + if (kv.value.equals(groupByField.getName())) { + shadowField = scriptField; + break; + } } - //we need to jump back to root - return AggregationBuilders.reverseNested(nestedBuilder.getName() + "_REVERSED").subAggregation(nestedBuilder); + } } - private AggregationBuilder createNestedAggregation(Field field) { - AggregationBuilder nestedBuilder; + if (shadowField == null) { + for (Field selectField : select.getFields()) { + if (selectField.getAlias() != null + && (groupByField.getName().equals(selectField.getAlias()) + || groupByField.getExpression().equals(selectField.getExpression()))) { + shadowField = selectField; + } + } + } - String nestedPath = field.getNestedPath(); + if (null != shadowField) { + groupByField.setAlias(shadowField.getAlias()); + groupByField = shadowField; + } - if (field.isReverseNested()) { - if (nestedPath == null || !nestedPath.startsWith("~")) { - ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = - AggregationBuilders.reverseNested(getNestedAggName(field)); - if (nestedPath != null) { - 
reverseNestedAggregationBuilder.path(nestedPath); - } - return reverseNestedAggregationBuilder; - } - nestedPath = nestedPath.substring(1); - } + lastAgg = aggMaker.makeGroupAgg(groupByField); - nestedBuilder = AggregationBuilders.nested(getNestedAggName(field), nestedPath); + // find if we have order for that aggregation. As of now only special case for script fields + if (groupByField.isScriptField()) { + addOrderByScriptFieldIfPresent( + select, (TermsAggregationBuilder) lastAgg, groupByField.getExpression()); + } - return nestedBuilder; + return lastAgg; + } + + private void addOrderByScriptFieldIfPresent( + Select select, TermsAggregationBuilder groupByAggregation, SQLExpr groupByExpression) { + // TODO: Explore other ways to correlate different fields/functions in the query (params?) + // This feels like a hacky way, but it's the best that could be done now. + select.getOrderBys().stream() + .filter(order -> groupByExpression.equals(order.getSortField().getExpression())) + .findFirst() + .ifPresent( + orderForGroupBy -> groupByAggregation.order(BucketOrder.key(isASC(orderForGroupBy)))); + } + + private AggregationBuilder wrapNestedIfNeeded( + AggregationBuilder nestedBuilder, boolean reverseNested) { + if (!reverseNested) { + return nestedBuilder; + } + if (reverseNested && !(nestedBuilder instanceof NestedAggregationBuilder)) { + return nestedBuilder; + } + // we need to jump back to root + return AggregationBuilders.reverseNested(nestedBuilder.getName() + "_REVERSED") + .subAggregation(nestedBuilder); + } + + private AggregationBuilder createNestedAggregation(Field field) { + AggregationBuilder nestedBuilder; + + String nestedPath = field.getNestedPath(); + + if (field.isReverseNested()) { + if (nestedPath == null || !nestedPath.startsWith("~")) { + ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = + AggregationBuilders.reverseNested(getNestedAggName(field)); + if (nestedPath != null) { + reverseNestedAggregationBuilder.path(nestedPath); + } 
+ return reverseNestedAggregationBuilder; + } + nestedPath = nestedPath.substring(1); } - private AggregationBuilder createChildrenAggregation(Field field) { - AggregationBuilder childrenBuilder; + nestedBuilder = AggregationBuilders.nested(getNestedAggName(field), nestedPath); - String childType = field.getChildType(); + return nestedBuilder; + } - childrenBuilder = JoinAggregationBuilders.children(getChildrenAggName(field), childType); + private AggregationBuilder createChildrenAggregation(Field field) { + AggregationBuilder childrenBuilder; - return childrenBuilder; - } + String childType = field.getChildType(); - private String getNestedAggName(Field field) { - String prefix; + childrenBuilder = JoinAggregationBuilders.children(getChildrenAggName(field), childType); - if (field instanceof MethodField) { - String nestedPath = field.getNestedPath(); - if (nestedPath != null) { - prefix = nestedPath; - } else { - prefix = field.getAlias(); - } - } else { - prefix = field.getName(); - } - return prefix + "@NESTED"; - } + return childrenBuilder; + } - private String getChildrenAggName(Field field) { - String prefix; + private String getNestedAggName(Field field) { + String prefix; - if (field instanceof MethodField) { - String childType = field.getChildType(); + if (field instanceof MethodField) { + String nestedPath = field.getNestedPath(); + if (nestedPath != null) { + prefix = nestedPath; + } else { + prefix = field.getAlias(); + } + } else { + prefix = field.getName(); + } + return prefix + "@NESTED"; + } + + private String getChildrenAggName(Field field) { + String prefix; + + if (field instanceof MethodField) { + String childType = field.getChildType(); + + if (childType != null) { + prefix = childType; + } else { + prefix = field.getAlias(); + } + } else { + prefix = field.getName(); + } - if (childType != null) { - prefix = childType; - } else { - prefix = field.getAlias(); - } - } else { - prefix = field.getName(); - } + return prefix + "@CHILDREN"; + } - 
return prefix + "@CHILDREN"; + private boolean insertFilterIfExistsAfter( + AggregationBuilder agg, List groupBy, AggregationBuilder builder, int nextPosition) + throws SqlParseException { + if (groupBy.size() <= nextPosition) { + return false; } - - private boolean insertFilterIfExistsAfter(AggregationBuilder agg, List groupBy, AggregationBuilder builder, - int nextPosition) throws SqlParseException { - if (groupBy.size() <= nextPosition) { - return false; - } - Field filterFieldCandidate = groupBy.get(nextPosition); - if (!(filterFieldCandidate instanceof MethodField)) { - return false; - } - MethodField methodField = (MethodField) filterFieldCandidate; - if (!methodField.getName().toLowerCase().equals("filter")) { - return false; - } - builder.subAggregation(aggMaker.makeGroupAgg(filterFieldCandidate).subAggregation(agg)); - return true; + Field filterFieldCandidate = groupBy.get(nextPosition); + if (!(filterFieldCandidate instanceof MethodField)) { + return false; } - - private AggregationBuilder updateAggIfNested(AggregationBuilder lastAgg, Field field) { - if (field.isNested()) { - lastAgg = AggregationBuilders.nested(field.getName() + "Nested", field.getNestedPath()) - .subAggregation(lastAgg); - } - return lastAgg; + MethodField methodField = (MethodField) filterFieldCandidate; + if (!methodField.getName().toLowerCase().equals("filter")) { + return false; } - - private boolean isASC(Order order) { - return "ASC".equals(order.getType()); + builder.subAggregation(aggMaker.makeGroupAgg(filterFieldCandidate).subAggregation(agg)); + return true; + } + + private AggregationBuilder updateAggIfNested(AggregationBuilder lastAgg, Field field) { + if (field.isNested()) { + lastAgg = + AggregationBuilders.nested(field.getName() + "Nested", field.getNestedPath()) + .subAggregation(lastAgg); } + return lastAgg; + } - private void setFields(List fields) { - if (select.getFields().size() > 0) { - ArrayList includeFields = new ArrayList<>(); + private boolean isASC(Order 
order) { + return "ASC".equals(order.getType()); + } - for (Field field : fields) { - if (field != null) { - includeFields.add(field.getName()); - } - } + private void setFields(List fields) { + if (select.getFields().size() > 0) { + ArrayList includeFields = new ArrayList<>(); - request.setFetchSource(includeFields.toArray(new String[0]), null); + for (Field field : fields) { + if (field != null) { + includeFields.add(field.getName()); } - } + } - private void explanFields(SearchRequestBuilder request, List fields, AggregationBuilder groupByAgg) - throws SqlParseException { - for (Field field : fields) { - if (field instanceof MethodField) { - - if (field.getName().equals("script")) { - request.addStoredField(field.getAlias()); - DefaultQueryAction defaultQueryAction = new DefaultQueryAction(client, select); - defaultQueryAction.initialize(request); - List tempFields = Lists.newArrayList(field); - defaultQueryAction.setFields(tempFields); - continue; - } - - AggregationBuilder makeAgg = aggMaker - .withWhere(select.getWhere()) - .makeFieldAgg((MethodField) field, groupByAgg); - if (groupByAgg != null) { - groupByAgg.subAggregation(makeAgg); - } else { - request.addAggregation(makeAgg); - } - } else if (field != null) { - request.addStoredField(field.getName()); - } else { - throw new SqlParseException("it did not support this field method " + field); - } - } + request.setFetchSource(includeFields.toArray(new String[0]), null); } - - private void explainHaving(AggregationBuilder lastAgg) throws SqlParseException { - Having having = select.getHaving(); - if (having != null) { - having.explain(lastAgg, select.getFields()); + } + + private void explanFields( + SearchRequestBuilder request, List fields, AggregationBuilder groupByAgg) + throws SqlParseException { + for (Field field : fields) { + if (field instanceof MethodField) { + + if (field.getName().equals("script")) { + request.addStoredField(field.getAlias()); + DefaultQueryAction defaultQueryAction = new 
DefaultQueryAction(client, select); + defaultQueryAction.initialize(request); + List tempFields = Lists.newArrayList(field); + defaultQueryAction.setFields(tempFields); + continue; } - } - /** - * Create filters based on - * the Where clause. - * - * @param where the 'WHERE' part of the SQL query. - * @throws SqlParseException - */ - private void setWhere(Where where) throws SqlParseException { - BoolQueryBuilder boolQuery = null; - if (where != null) { - boolQuery = QueryMaker.explain(where, this.select.isQuery); - } - // Used to prevent NullPointerException in old tests as they do not set sqlRequest in QueryAction - if (sqlRequest != null) { - boolQuery = sqlRequest.checkAndAddFilter(boolQuery); + AggregationBuilder makeAgg = + aggMaker.withWhere(select.getWhere()).makeFieldAgg((MethodField) field, groupByAgg); + if (groupByAgg != null) { + groupByAgg.subAggregation(makeAgg); + } else { + request.addAggregation(makeAgg); } - request.setQuery(boolQuery); + } else if (field != null) { + request.addStoredField(field.getName()); + } else { + throw new SqlParseException("it did not support this field method " + field); + } } + } - - /** - * Set indices and types to the search request. - */ - private void setIndicesAndTypes() { - request.setIndices(query.getIndexArr()); + private void explainHaving(AggregationBuilder lastAgg) throws SqlParseException { + Having having = select.getHaving(); + if (having != null) { + having.explain(lastAgg, select.getFields()); } - - private void setLimitFromHint(List hints) { - int from = 0; - int size = 0; - for (Hint hint : hints) { - if (hint.getType() == HintType.DOCS_WITH_AGGREGATION) { - Integer[] params = (Integer[]) hint.getParams(); - if (params.length > 1) { - // if 2 or more are given, use the first as the from and the second as the size - // so it is the same as LIMIT from,size - // except written as /*! 
DOCS_WITH_AGGREGATION(from,size) */ - from = params[0]; - size = params[1]; - } else if (params.length == 1) { - // if only 1 parameter is given, use it as the size with a from of 0 - size = params[0]; - } - break; - } + } + + /** + * Create filters based on the Where clause. + * + * @param where the 'WHERE' part of the SQL query. + * @throws SqlParseException + */ + private void setWhere(Where where) throws SqlParseException { + BoolQueryBuilder boolQuery = null; + if (where != null) { + boolQuery = QueryMaker.explain(where, this.select.isQuery); + } + // Used to prevent NullPointerException in old tests as they do not set sqlRequest in + // QueryAction + if (sqlRequest != null) { + boolQuery = sqlRequest.checkAndAddFilter(boolQuery); + } + request.setQuery(boolQuery); + } + + /** Set indices and types to the search request. */ + private void setIndicesAndTypes() { + request.setIndices(query.getIndexArr()); + } + + private void setLimitFromHint(List hints) { + int from = 0; + int size = 0; + for (Hint hint : hints) { + if (hint.getType() == HintType.DOCS_WITH_AGGREGATION) { + Integer[] params = (Integer[]) hint.getParams(); + if (params.length > 1) { + // if 2 or more are given, use the first as the from and the second as the size + // so it is the same as LIMIT from,size + // except written as /*! 
DOCS_WITH_AGGREGATION(from,size) */ + from = params[0]; + size = params[1]; + } else if (params.length == 1) { + // if only 1 parameter is given, use it as the size with a from of 0 + size = params[0]; } - request.setFrom(from); - request.setSize(size); + break; + } } + request.setFrom(from); + request.setSize(size); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java index 0ed5043ac8..18c9708df8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import com.alibaba.druid.sql.ast.SQLExpr; @@ -50,264 +49,268 @@ import org.opensearch.sql.legacy.rewriter.nestedfield.NestedFieldProjection; import org.opensearch.sql.legacy.utils.SQLFunctions; -/** - * Transform SQL query to standard OpenSearch search query - */ +/** Transform SQL query to standard OpenSearch search query */ public class DefaultQueryAction extends QueryAction { - private final Select select; - private SearchRequestBuilder request; - - private final List fieldNames = new LinkedList<>(); - - public DefaultQueryAction(Client client, Select select) { - super(client, select); - this.select = select; - } - - public void initialize(SearchRequestBuilder request) { - this.request = request; + private final Select select; + private SearchRequestBuilder request; + + private final List fieldNames = new LinkedList<>(); + + public DefaultQueryAction(Client client, Select select) { + super(client, select); + this.select = select; + } + + public void initialize(SearchRequestBuilder request) { + this.request = request; + } + + @Override + public SqlOpenSearchRequestBuilder explain() throws SqlParseException { + Objects.requireNonNull(this.sqlRequest, "SqlRequest is required for 
OpenSearch request build"); + buildRequest(); + checkAndSetScroll(); + return new SqlOpenSearchRequestBuilder(request); + } + + private void buildRequest() throws SqlParseException { + this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); + setIndicesAndTypes(); + setFields(select.getFields()); + setWhere(select.getWhere()); + setSorts(select.getOrderBys()); + updateRequestWithIndexAndRoutingOptions(select, request); + updateRequestWithHighlight(select, request); + updateRequestWithCollapse(select, request); + updateRequestWithPostFilter(select, request); + updateRequestWithInnerHits(select, request); + } + + @VisibleForTesting + public void checkAndSetScroll() { + LocalClusterState clusterState = LocalClusterState.state(); + + Integer fetchSize = sqlRequest.fetchSize(); + TimeValue timeValue = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + Integer rowCount = select.getRowCount(); + + if (checkIfScrollNeeded(fetchSize, rowCount)) { + Metrics.getInstance() + .getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_COUNT_TOTAL) + .increment(); + Metrics.getInstance().getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_TOTAL).increment(); + request.setSize(fetchSize).setScroll(timeValue); + } else { + request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); + setLimit(select.getOffset(), rowCount != null ? 
rowCount : Select.DEFAULT_LIMIT); } - - @Override - public SqlOpenSearchRequestBuilder explain() throws SqlParseException { - Objects.requireNonNull(this.sqlRequest, "SqlRequest is required for OpenSearch request build"); - buildRequest(); - checkAndSetScroll(); - return new SqlOpenSearchRequestBuilder(request); - } - - private void buildRequest() throws SqlParseException { - this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); - setIndicesAndTypes(); - setFields(select.getFields()); - setWhere(select.getWhere()); - setSorts(select.getOrderBys()); - updateRequestWithIndexAndRoutingOptions(select, request); - updateRequestWithHighlight(select, request); - updateRequestWithCollapse(select, request); - updateRequestWithPostFilter(select, request); - updateRequestWithInnerHits(select, request); - } - - @VisibleForTesting - public void checkAndSetScroll() { - LocalClusterState clusterState = LocalClusterState.state(); - - Integer fetchSize = sqlRequest.fetchSize(); - TimeValue timeValue = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); - Integer rowCount = select.getRowCount(); - - if (checkIfScrollNeeded(fetchSize, rowCount)) { - Metrics.getInstance().getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_COUNT_TOTAL).increment(); - Metrics.getInstance().getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_TOTAL).increment(); - request.setSize(fetchSize).setScroll(timeValue); - } else { - request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); - setLimit(select.getOffset(), rowCount != null ? rowCount : Select.DEFAULT_LIMIT); + } + + private boolean checkIfScrollNeeded(Integer fetchSize, Integer rowCount) { + return (format != null && format.equals(Format.JDBC)) + && fetchSize > 0 + && (rowCount == null || (rowCount > fetchSize)); + } + + @Override + public Optional> getFieldNames() { + return Optional.of(fieldNames); + } + + public Select getSelect() { + return select; + } + + /** Set indices and types to the search request. 
*/ + private void setIndicesAndTypes() { + request.setIndices(query.getIndexArr()); + } + + /** + * Set source filtering on a search request. + * + * @param fields list of fields to source filter. + */ + public void setFields(List fields) throws SqlParseException { + + if (!select.getFields().isEmpty() && !select.isSelectAll()) { + ArrayList includeFields = new ArrayList<>(); + ArrayList excludeFields = new ArrayList<>(); + + for (Field field : fields) { + if (field instanceof MethodField) { + MethodField method = (MethodField) field; + if (method.getName().toLowerCase().equals("script")) { + handleScriptField(method); + if (method.getExpression() instanceof SQLCastExpr) { + includeFields.add(method.getParams().get(0).toString()); + } + } else if (method.getName().equalsIgnoreCase("include")) { + for (KVValue kvValue : method.getParams()) { + includeFields.add(kvValue.value.toString()); + } + } else if (method.getName().equalsIgnoreCase("exclude")) { + for (KVValue kvValue : method.getParams()) { + excludeFields.add(kvValue.value.toString()); + } + } + } else if (field != null) { + if (isNotNested(field)) { + includeFields.add(field.getName()); + } } - } - - - private boolean checkIfScrollNeeded(Integer fetchSize, Integer rowCount) { - return (format != null && format.equals(Format.JDBC)) - && fetchSize > 0 - && (rowCount == null || (rowCount > fetchSize)); - } + } - @Override - public Optional> getFieldNames() { - return Optional.of(fieldNames); + fieldNames.addAll(includeFields); + request.setFetchSource( + includeFields.toArray(new String[0]), excludeFields.toArray(new String[0])); } + } + private void handleScriptField(final MethodField method) throws SqlParseException { - public Select getSelect() { - return select; - } - - /** - * Set indices and types to the search request. - */ - private void setIndicesAndTypes() { - request.setIndices(query.getIndexArr()); - } - - /** - * Set source filtering on a search request. 
- * - * @param fields list of fields to source filter. - */ - public void setFields(List fields) throws SqlParseException { - - if (!select.getFields().isEmpty() && !select.isSelectAll()) { - ArrayList includeFields = new ArrayList<>(); - ArrayList excludeFields = new ArrayList<>(); - - for (Field field : fields) { - if (field instanceof MethodField) { - MethodField method = (MethodField) field; - if (method.getName().toLowerCase().equals("script")) { - handleScriptField(method); - if (method.getExpression() instanceof SQLCastExpr) { - includeFields.add(method.getParams().get(0).toString()); - } - } else if (method.getName().equalsIgnoreCase("include")) { - for (KVValue kvValue : method.getParams()) { - includeFields.add(kvValue.value.toString()); - } - } else if (method.getName().equalsIgnoreCase("exclude")) { - for (KVValue kvValue : method.getParams()) { - excludeFields.add(kvValue.value.toString()); - } - } - } else if (field != null) { - if (isNotNested(field)) { - includeFields.add(field.getName()); - } - } - } + final List params = method.getParams(); + final int numOfParams = params.size(); - fieldNames.addAll(includeFields); - request.setFetchSource(includeFields.toArray(new String[0]), excludeFields.toArray(new String[0])); - } + if (2 != numOfParams && 3 != numOfParams) { + throw new SqlParseException( + "scripted_field only allows 'script(name,script)' " + "or 'script(name,lang,script)'"); } - private void handleScriptField(final MethodField method) throws SqlParseException { - - final List params = method.getParams(); - final int numOfParams = params.size(); - - if (2 != numOfParams && 3 != numOfParams) { - throw new SqlParseException("scripted_field only allows 'script(name,script)' " - + "or 'script(name,lang,script)'"); - } - - final String fieldName = params.get(0).value.toString(); - fieldNames.add(fieldName); - - final String secondParam = params.get(1).value.toString(); - final Script script = (2 == numOfParams) ? 
new Script(secondParam) : - new Script(ScriptType.INLINE, secondParam, params.get(2).value.toString(), Collections.emptyMap()); - request.addScriptField(fieldName, script); + final String fieldName = params.get(0).value.toString(); + fieldNames.add(fieldName); + + final String secondParam = params.get(1).value.toString(); + final Script script = + (2 == numOfParams) + ? new Script(secondParam) + : new Script( + ScriptType.INLINE, + secondParam, + params.get(2).value.toString(), + Collections.emptyMap()); + request.addScriptField(fieldName, script); + } + + /** + * Create filters or queries based on the Where clause. + * + * @param where the 'WHERE' part of the SQL query. + * @throws SqlParseException if the where clause does not represent valid sql + */ + private void setWhere(Where where) throws SqlParseException { + BoolQueryBuilder boolQuery = null; + if (where != null) { + boolQuery = QueryMaker.explain(where, this.select.isQuery); } - - /** - * Create filters or queries based on the Where clause. - * - * @param where the 'WHERE' part of the SQL query. - * @throws SqlParseException if the where clause does not represent valid sql - */ - private void setWhere(Where where) throws SqlParseException { - BoolQueryBuilder boolQuery = null; - if (where != null) { - boolQuery = QueryMaker.explain(where, this.select.isQuery); - } - // Used to prevent NullPointerException in old tests as they do not set sqlRequest in QueryAction - if (sqlRequest != null) { - boolQuery = sqlRequest.checkAndAddFilter(boolQuery); - } - request.setQuery(boolQuery); + // Used to prevent NullPointerException in old tests as they do not set sqlRequest in + // QueryAction + if (sqlRequest != null) { + boolQuery = sqlRequest.checkAndAddFilter(boolQuery); } - - /** - * Add sorts to the OpenSearch query based on the 'ORDER BY' clause. 
- * - * @param orderBys list of Order object - */ - private void setSorts(List orderBys) { - Map sortBuilderMap = new HashMap<>(); - - for (Order order : orderBys) { - String orderByName = order.getName(); - SortOrder sortOrder = SortOrder.valueOf(order.getType()); - - if (order.getNestedPath() != null) { - request.addSort( - SortBuilders.fieldSort(orderByName) - .order(sortOrder) - .setNestedSort(new NestedSortBuilder(order.getNestedPath()))); - } else if (order.isScript()) { - // TODO: Investigate how to find the type of expression (string or number) - // As of now this shouldn't be a problem, because the support is for date_format function - request.addSort( - SortBuilders - .scriptSort(new Script(orderByName), getScriptSortType(order)) - .order(sortOrder)); - } else if (orderByName.equals(ScoreSortBuilder.NAME)) { - request.addSort(orderByName, sortOrder); - } else { - FieldSortBuilder fieldSortBuilder = sortBuilderMap.computeIfAbsent(orderByName, key -> { - FieldSortBuilder fs = SortBuilders.fieldSort(key); - request.addSort(fs); - return fs; + request.setQuery(boolQuery); + } + + /** + * Add sorts to the OpenSearch query based on the 'ORDER BY' clause. 
+ * + * @param orderBys list of Order object + */ + private void setSorts(List orderBys) { + Map sortBuilderMap = new HashMap<>(); + + for (Order order : orderBys) { + String orderByName = order.getName(); + SortOrder sortOrder = SortOrder.valueOf(order.getType()); + + if (order.getNestedPath() != null) { + request.addSort( + SortBuilders.fieldSort(orderByName) + .order(sortOrder) + .setNestedSort(new NestedSortBuilder(order.getNestedPath()))); + } else if (order.isScript()) { + // TODO: Investigate how to find the type of expression (string or number) + // As of now this shouldn't be a problem, because the support is for date_format function + request.addSort( + SortBuilders.scriptSort(new Script(orderByName), getScriptSortType(order)) + .order(sortOrder)); + } else if (orderByName.equals(ScoreSortBuilder.NAME)) { + request.addSort(orderByName, sortOrder); + } else { + FieldSortBuilder fieldSortBuilder = + sortBuilderMap.computeIfAbsent( + orderByName, + key -> { + FieldSortBuilder fs = SortBuilders.fieldSort(key); + request.addSort(fs); + return fs; }); - setSortParams(fieldSortBuilder, order); - } - } + setSortParams(fieldSortBuilder, order); + } } + } + private void setSortParams(FieldSortBuilder fieldSortBuilder, Order order) { + fieldSortBuilder.order(SortOrder.valueOf(order.getType())); - private void setSortParams(FieldSortBuilder fieldSortBuilder, Order order) { - fieldSortBuilder.order(SortOrder.valueOf(order.getType())); - - SQLExpr expr = order.getSortField().getExpression(); - if (expr instanceof SQLBinaryOpExpr) { - // we set SQLBinaryOpExpr in Field.setExpression() to support ORDER by IS NULL/IS NOT NULL - fieldSortBuilder.missing(getNullOrderString((SQLBinaryOpExpr) expr)); - } + SQLExpr expr = order.getSortField().getExpression(); + if (expr instanceof SQLBinaryOpExpr) { + // we set SQLBinaryOpExpr in Field.setExpression() to support ORDER by IS NULL/IS NOT NULL + fieldSortBuilder.missing(getNullOrderString((SQLBinaryOpExpr) expr)); } - - private 
String getNullOrderString(SQLBinaryOpExpr expr) { - SQLBinaryOperator operator = expr.getOperator(); - return operator == SQLBinaryOperator.IsNot ? "_first" : "_last"; + } + + private String getNullOrderString(SQLBinaryOpExpr expr) { + SQLBinaryOperator operator = expr.getOperator(); + return operator == SQLBinaryOperator.IsNot ? "_first" : "_last"; + } + + private ScriptSortType getScriptSortType(Order order) { + ScriptSortType scriptSortType; + Schema.Type scriptFunctionReturnType = SQLFunctions.getOrderByFieldType(order.getSortField()); + + // as of now script function return type returns only text and double + switch (scriptFunctionReturnType) { + case TEXT: + scriptSortType = ScriptSortType.STRING; + break; + + case DOUBLE: + case FLOAT: + case INTEGER: + case LONG: + scriptSortType = ScriptSortType.NUMBER; + break; + default: + throw new IllegalStateException("Unknown type: " + scriptFunctionReturnType); } - - private ScriptSortType getScriptSortType(Order order) { - ScriptSortType scriptSortType; - Schema.Type scriptFunctionReturnType = SQLFunctions.getOrderByFieldType(order.getSortField()); - - - // as of now script function return type returns only text and double - switch (scriptFunctionReturnType) { - case TEXT: - scriptSortType = ScriptSortType.STRING; - break; - - case DOUBLE: - case FLOAT: - case INTEGER: - case LONG: - scriptSortType = ScriptSortType.NUMBER; - break; - default: - throw new IllegalStateException("Unknown type: " + scriptFunctionReturnType); - } - return scriptSortType; + return scriptSortType; + } + + /** + * Add from and size to the OpenSearch query based on the 'LIMIT' clause + * + * @param from starts from document at position from + * @param size number of documents to return. 
+ */ + private void setLimit(int from, int size) { + request.setFrom(from); + + if (size > -1) { + request.setSize(size); } + } - /** - * Add from and size to the OpenSearch query based on the 'LIMIT' clause - * - * @param from starts from document at position from - * @param size number of documents to return. - */ - private void setLimit(int from, int size) { - request.setFrom(from); - - if (size > -1) { - request.setSize(size); - } - } + public SearchRequestBuilder getRequestBuilder() { + return request; + } - public SearchRequestBuilder getRequestBuilder() { - return request; - } + private boolean isNotNested(Field field) { + return !field.isNested() || field.isReverseNested(); + } - private boolean isNotNested(Field field) { - return !field.isNested() || field.isReverseNested(); - } - - private void updateRequestWithInnerHits(Select select, SearchRequestBuilder request) { - new NestedFieldProjection(request).project(select.getFields(), select.getNestedJoinType()); - } + private void updateRequestWithInnerHits(Select select, SearchRequestBuilder request) { + new NestedFieldProjection(request).project(select.getFields(), select.getNestedJoinType()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java index 892c5aeb2d..331921345f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; - import org.opensearch.client.Client; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -20,50 +18,44 @@ public class DeleteQueryAction extends QueryAction { - private final Delete delete; - private DeleteByQueryRequestBuilder request; - - public DeleteQueryAction(Client client, Delete delete) { - 
super(client, delete); - this.delete = delete; - } - - @Override - public SqlElasticDeleteByQueryRequestBuilder explain() throws SqlParseException { - this.request = new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE); - - setIndicesAndTypes(); - setWhere(delete.getWhere()); - SqlElasticDeleteByQueryRequestBuilder deleteByQueryRequestBuilder = - new SqlElasticDeleteByQueryRequestBuilder(request); - return deleteByQueryRequestBuilder; - } - - - /** - * Set indices and types to the delete by query request. - */ - private void setIndicesAndTypes() { - - DeleteByQueryRequest innerRequest = request.request(); - innerRequest.indices(query.getIndexArr()); + private final Delete delete; + private DeleteByQueryRequestBuilder request; + + public DeleteQueryAction(Client client, Delete delete) { + super(client, delete); + this.delete = delete; + } + + @Override + public SqlElasticDeleteByQueryRequestBuilder explain() throws SqlParseException { + this.request = new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE); + + setIndicesAndTypes(); + setWhere(delete.getWhere()); + SqlElasticDeleteByQueryRequestBuilder deleteByQueryRequestBuilder = + new SqlElasticDeleteByQueryRequestBuilder(request); + return deleteByQueryRequestBuilder; + } + + /** Set indices and types to the delete by query request. */ + private void setIndicesAndTypes() { + + DeleteByQueryRequest innerRequest = request.request(); + innerRequest.indices(query.getIndexArr()); + } + + /** + * Create filters based on the Where clause. + * + * @param where the 'WHERE' part of the SQL query. + * @throws SqlParseException + */ + private void setWhere(Where where) throws SqlParseException { + if (where != null) { + QueryBuilder whereQuery = QueryMaker.explain(where); + request.filter(whereQuery); + } else { + request.filter(QueryBuilders.matchAllQuery()); } - - - /** - * Create filters based on - * the Where clause. - * - * @param where the 'WHERE' part of the SQL query. 
- * @throws SqlParseException - */ - private void setWhere(Where where) throws SqlParseException { - if (where != null) { - QueryBuilder whereQuery = QueryMaker.explain(where); - request.filter(whereQuery); - } else { - request.filter(QueryBuilders.matchAllQuery()); - } - } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java index 077d9c28b8..ffc9695d81 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import org.opensearch.action.admin.indices.get.GetIndexRequestBuilder; @@ -14,22 +13,23 @@ public class DescribeQueryAction extends QueryAction { - private final IndexStatement statement; + private final IndexStatement statement; - public DescribeQueryAction(Client client, IndexStatement statement) { - super(client, null); - this.statement = statement; - } + public DescribeQueryAction(Client client, IndexStatement statement) { + super(client, null); + this.statement = statement; + } - @Override - public QueryStatement getQueryStatement() { - return statement; - } + @Override + public QueryStatement getQueryStatement() { + return statement; + } - @Override - public SqlOpenSearchRequestBuilder explain() { - final GetIndexRequestBuilder indexRequestBuilder = Util.prepareIndexRequestBuilder(client, statement); + @Override + public SqlOpenSearchRequestBuilder explain() { + final GetIndexRequestBuilder indexRequestBuilder = + Util.prepareIndexRequestBuilder(client, statement); - return new SqlOpenSearchRequestBuilder(indexRequestBuilder); - } + return new SqlOpenSearchRequestBuilder(indexRequestBuilder); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java index 06ec21247a..d767268cb1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.ArrayList; @@ -22,198 +21,198 @@ public class BackOffRetryStrategy { - private static final Logger LOG = LogManager.getLogger(); - - /** - * Interval (ms) between each retry - */ - private static final long[] intervals = milliseconds(new double[]{4, 8 + 4, 16 + 4}); + private static final Logger LOG = LogManager.getLogger(); - /** - * Delta to randomize interval (ms) - */ - private static final long delta = 4 * 1000; + /** Interval (ms) between each retry */ + private static final long[] intervals = milliseconds(new double[] {4, 8 + 4, 16 + 4}); - private static final int threshold = 85; + /** Delta to randomize interval (ms) */ + private static final long delta = 4 * 1000; - private static IdentityHashMap> memUse = new IdentityHashMap<>(); + private static final int threshold = 85; - private static AtomicLong mem = new AtomicLong(0L); + private static IdentityHashMap> memUse = new IdentityHashMap<>(); - private static long lastTimeoutCleanTime = System.currentTimeMillis(); + private static AtomicLong mem = new AtomicLong(0L); - private static final long RELTIMEOUT = 1000 * 60 * 30; + private static long lastTimeoutCleanTime = System.currentTimeMillis(); - private static final int MAXRETRIES = 999; + private static final long RELTIMEOUT = 1000 * 60 * 30; - private static final Object obj = new Object(); + private static final int MAXRETRIES = 999; - public static final Supplier GET_CB_STATE = () -> isMemoryHealthy() ? 
0 : 1; + private static final Object obj = new Object(); - private BackOffRetryStrategy() { + public static final Supplier GET_CB_STATE = () -> isMemoryHealthy() ? 0 : 1; - } + private BackOffRetryStrategy() {} - private static boolean isMemoryHealthy() { - final long freeMemory = Runtime.getRuntime().freeMemory(); - final long totalMemory = Runtime.getRuntime().totalMemory(); - final int memoryUsage = (int) Math.round((double) (totalMemory - freeMemory + mem.get()) - / (double) totalMemory * 100); + private static boolean isMemoryHealthy() { + final long freeMemory = Runtime.getRuntime().freeMemory(); + final long totalMemory = Runtime.getRuntime().totalMemory(); + final int memoryUsage = + (int) + Math.round( + (double) (totalMemory - freeMemory + mem.get()) / (double) totalMemory * 100); - LOG.debug("[MCB1] Memory total, free, allocate: {}, {}, {}", totalMemory, freeMemory, mem.get()); - LOG.debug("[MCB1] Memory usage and limit: {}%, {}%", memoryUsage, threshold); + LOG.debug( + "[MCB1] Memory total, free, allocate: {}, {}, {}", totalMemory, freeMemory, mem.get()); + LOG.debug("[MCB1] Memory usage and limit: {}%, {}%", memoryUsage, threshold); - return memoryUsage < threshold; - } + return memoryUsage < threshold; + } - public static boolean isHealthy() { - for (int i = 0; i < intervals.length; i++) { - if (isMemoryHealthy()) { - return true; - } - - LOG.warn("[MCB1] Memory monitor is unhealthy now, back off retrying: {} attempt, thread id = {}", - i, Thread.currentThread().getId()); - if (ThreadLocalRandom.current().nextBoolean()) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); - LOG.warn("[MCB1] Directly abort on idx {}.", i); - return false; - } - backOffSleep(intervals[i]); - } - - boolean isHealthy = isMemoryHealthy(); - if (!isHealthy) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); - } + public static boolean isHealthy() { + for (int i = 0; i < intervals.length; i++) { + 
if (isMemoryHealthy()) { + return true; + } - return isHealthy; + LOG.warn( + "[MCB1] Memory monitor is unhealthy now, back off retrying: {} attempt, thread id = {}", + i, + Thread.currentThread().getId()); + if (ThreadLocalRandom.current().nextBoolean()) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); + LOG.warn("[MCB1] Directly abort on idx {}.", i); + return false; + } + backOffSleep(intervals[i]); } - private static boolean isMemoryHealthy(long allocateMemory, int idx, Object key) { - long logMem = mem.get(); - - releaseTimeoutMemory(); - if (idx == 0 && allocateMemory > 0) { - logMem = mem.addAndGet(allocateMemory); - synchronized (BackOffRetryStrategy.class) { - if (memUse.containsKey(key)) { - memUse.put(key, Tuple.tuple(memUse.get(key).v1(), memUse.get(key).v2() + allocateMemory)); - } else { - memUse.put(key, Tuple.tuple(System.currentTimeMillis(), allocateMemory)); - } - } - } - - final long freeMemory = Runtime.getRuntime().freeMemory(); - final long totalMemory = Runtime.getRuntime().totalMemory(); - final int memoryUsage = (int) Math.round((double) (totalMemory - freeMemory + logMem) - / (double) totalMemory * 100); + boolean isHealthy = isMemoryHealthy(); + if (!isHealthy) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); + } - LOG.debug("[MCB] Idx is {}", idx); - LOG.debug("[MCB] Memory total, free, allocate: {}, {}, {}, {}", totalMemory, freeMemory, - allocateMemory, logMem); - LOG.debug("[MCB] Memory usage and limit: {}%, {}%", memoryUsage, threshold); + return isHealthy; + } - return memoryUsage < threshold; + private static boolean isMemoryHealthy(long allocateMemory, int idx, Object key) { + long logMem = mem.get(); + releaseTimeoutMemory(); + if (idx == 0 && allocateMemory > 0) { + logMem = mem.addAndGet(allocateMemory); + synchronized (BackOffRetryStrategy.class) { + if (memUse.containsKey(key)) { + memUse.put(key, Tuple.tuple(memUse.get(key).v1(), 
memUse.get(key).v2() + allocateMemory)); + } else { + memUse.put(key, Tuple.tuple(System.currentTimeMillis(), allocateMemory)); + } + } } - public static boolean isHealthy(long allocateMemory, Object key) { - if (key == null) { - key = obj; - } + final long freeMemory = Runtime.getRuntime().freeMemory(); + final long totalMemory = Runtime.getRuntime().totalMemory(); + final int memoryUsage = + (int) Math.round((double) (totalMemory - freeMemory + logMem) / (double) totalMemory * 100); + + LOG.debug("[MCB] Idx is {}", idx); + LOG.debug( + "[MCB] Memory total, free, allocate: {}, {}, {}, {}", + totalMemory, + freeMemory, + allocateMemory, + logMem); + LOG.debug("[MCB] Memory usage and limit: {}%, {}%", memoryUsage, threshold); + + return memoryUsage < threshold; + } + + public static boolean isHealthy(long allocateMemory, Object key) { + if (key == null) { + key = obj; + } - for (int i = 0; i < intervals.length; i++) { - if (isMemoryHealthy(allocateMemory, i, key)) { - return true; - } - - LOG.warn("[MCB] Memory monitor is unhealthy now, back off retrying: {} attempt, " - + "executor = {}, thread id = {}", i, key, Thread.currentThread().getId()); - if (ThreadLocalRandom.current().nextBoolean()) { - LOG.warn("[MCB] Directly abort on idx {}, executor is {}.", i, key); - return false; - } - backOffSleep(intervals[i]); - } - return isMemoryHealthy(allocateMemory, MAXRETRIES, key); + for (int i = 0; i < intervals.length; i++) { + if (isMemoryHealthy(allocateMemory, i, key)) { + return true; + } + + LOG.warn( + "[MCB] Memory monitor is unhealthy now, back off retrying: {} attempt, " + + "executor = {}, thread id = {}", + i, + key, + Thread.currentThread().getId()); + if (ThreadLocalRandom.current().nextBoolean()) { + LOG.warn("[MCB] Directly abort on idx {}, executor is {}.", i, key); + return false; + } + backOffSleep(intervals[i]); } + return isMemoryHealthy(allocateMemory, MAXRETRIES, key); + } - public static void backOffSleep(long interval) { - try { - long millis = 
randomize(interval); + public static void backOffSleep(long interval) { + try { + long millis = randomize(interval); - LOG.info("[MCB] Back off sleeping: {} ms", millis); - Thread.sleep(millis); - } catch (InterruptedException e) { - LOG.error("[MCB] Sleep interrupted", e); - } + LOG.info("[MCB] Back off sleeping: {} ms", millis); + Thread.sleep(millis); + } catch (InterruptedException e) { + LOG.error("[MCB] Sleep interrupted", e); } - - /** - * Generate random interval in [interval-delta, interval+delta) - */ - private static long randomize(long interval) { - // Random number within range generator for JDK 7+ - return ThreadLocalRandom.current().nextLong( - lowerBound(interval), upperBound(interval) - ); + } + + /** Generate random interval in [interval-delta, interval+delta) */ + private static long randomize(long interval) { + // Random number within range generator for JDK 7+ + return ThreadLocalRandom.current().nextLong(lowerBound(interval), upperBound(interval)); + } + + private static long lowerBound(long interval) { + return Math.max(0, interval - delta); + } + + private static long upperBound(long interval) { + return interval + delta; + } + + private static long[] milliseconds(double[] seconds) { + return Arrays.stream(seconds).mapToLong((second) -> (long) (1000 * second)).toArray(); + } + + public static void releaseMem(Object key) { + LOG.debug("[MCB] mem is {} before release", mem); + long v = 0L; + synchronized (BackOffRetryStrategy.class) { + if (memUse.containsKey(key)) { + v = memUse.get(key).v2(); + memUse.remove(key); + } } - - private static long lowerBound(long interval) { - return Math.max(0, interval - delta); + if (v > 0) { + atomicMinusLowBoundZero(mem, v); } + LOG.debug("[MCB] mem is {} after release", mem); + } - private static long upperBound(long interval) { - return interval + delta; + private static void releaseTimeoutMemory() { + long cur = System.currentTimeMillis(); + if (cur - lastTimeoutCleanTime < RELTIMEOUT) { + return; } - 
private static long[] milliseconds(double[] seconds) { - return Arrays.stream(seconds). - mapToLong((second) -> (long) (1000 * second)). - toArray(); + List bulks = new ArrayList<>(); + Predicate> isTimeout = t -> cur - t.v1() > RELTIMEOUT; + synchronized (BackOffRetryStrategy.class) { + memUse.values().stream().filter(isTimeout).forEach(v -> bulks.add(v.v2())); + memUse.values().removeIf(isTimeout); } - public static void releaseMem(Object key) { - LOG.debug("[MCB] mem is {} before release", mem); - long v = 0L; - synchronized (BackOffRetryStrategy.class) { - if (memUse.containsKey(key)) { - v = memUse.get(key).v2(); - memUse.remove(key); - } - } - if (v > 0) { - atomicMinusLowBoundZero(mem, v); - } - LOG.debug("[MCB] mem is {} after release", mem); + for (long v : bulks) { + atomicMinusLowBoundZero(mem, v); } - private static void releaseTimeoutMemory() { - long cur = System.currentTimeMillis(); - if (cur - lastTimeoutCleanTime < RELTIMEOUT) { - return; - } + lastTimeoutCleanTime = cur; + } - List bulks = new ArrayList<>(); - Predicate> isTimeout = t -> cur - t.v1() > RELTIMEOUT; - synchronized (BackOffRetryStrategy.class) { - memUse.values().stream().filter(isTimeout).forEach(v -> bulks.add(v.v2())); - memUse.values().removeIf(isTimeout); - } - - for (long v : bulks) { - atomicMinusLowBoundZero(mem, v); - } - - lastTimeoutCleanTime = cur; - } - - private static void atomicMinusLowBoundZero(AtomicLong x, Long y) { - long memRes = x.addAndGet(-y); - if (memRes < 0) { - x.compareAndSet(memRes, 0L); - } + private static void atomicMinusLowBoundZero(AtomicLong x, Long y) { + long memRes = x.addAndGet(-y); + if (memRes < 0) { + x.compareAndSet(memRes, 0L); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java index 9c3f1104a7..dcb703cd33 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.maker; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; @@ -65,758 +64,788 @@ public class AggMaker { - /** - * The mapping bettwen group fieldName or Alias to the KVValue. - */ - private Map groupMap = new HashMap<>(); - private Where where; - - /** - * - * - * @param field - * @return - * @throws SqlParseException - */ - public AggregationBuilder makeGroupAgg(Field field) throws SqlParseException { - - if (field instanceof MethodField && field.getName().equals("script")) { - MethodField methodField = (MethodField) field; - TermsAggregationBuilder termsBuilder = AggregationBuilders.terms(methodField.getAlias()) - .script(new Script(methodField.getParams().get(1).value.toString())); - extendGroupMap(methodField, new KVValue("KEY", termsBuilder)); - return termsBuilder; - } - - - if (field instanceof MethodField) { - - MethodField methodField = (MethodField) field; - if (methodField.getName().equals("filter")) { - Map paramsAsMap = methodField.getParamsAsMap(); - Where where = (Where) paramsAsMap.get("where"); - return AggregationBuilders.filter(paramsAsMap.get("alias").toString(), - QueryMaker.explain(where)); - } - return makeRangeGroup(methodField); - } else { - String termName = (Strings.isNullOrEmpty(field.getAlias())) ? field.getName() : field.getAlias(); - TermsAggregationBuilder termsBuilder = AggregationBuilders.terms(termName).field(field.getName()); - final KVValue kvValue = new KVValue("KEY", termsBuilder); - groupMap.put(termName, kvValue); - // map the field name with KVValue if it is not yet. The use case is when alias exist, - // the termName is different with fieldName, both of them should be included in the map. - groupMap.putIfAbsent(field.getName(), kvValue); - return termsBuilder; - } + /** The mapping bettwen group fieldName or Alias to the KVValue. 
*/ + private Map groupMap = new HashMap<>(); + + private Where where; + + /** + * @param field + * @return + * @throws SqlParseException + */ + public AggregationBuilder makeGroupAgg(Field field) throws SqlParseException { + + if (field instanceof MethodField && field.getName().equals("script")) { + MethodField methodField = (MethodField) field; + TermsAggregationBuilder termsBuilder = + AggregationBuilders.terms(methodField.getAlias()) + .script(new Script(methodField.getParams().get(1).value.toString())); + extendGroupMap(methodField, new KVValue("KEY", termsBuilder)); + return termsBuilder; } - - /** - * Create aggregation according to the SQL function. - * - * @param field SQL function - * @param parent parentAggregation - * @return AggregationBuilder represents the SQL function - * @throws SqlParseException in case of unrecognized function - */ - public AggregationBuilder makeFieldAgg(MethodField field, AggregationBuilder parent) throws SqlParseException { - extendGroupMap(field, new KVValue("FIELD", parent)); - ValuesSourceAggregationBuilder builder; - field.setAlias(fixAlias(field.getAlias())); - switch (field.getName().toUpperCase()) { - case "SUM": - builder = AggregationBuilders.sum(field.getAlias()); - return addFieldToAgg(field, builder); - case "MAX": - builder = AggregationBuilders.max(field.getAlias()); - return addFieldToAgg(field, builder); - case "MIN": - builder = AggregationBuilders.min(field.getAlias()); - return addFieldToAgg(field, builder); - case "AVG": - builder = AggregationBuilders.avg(field.getAlias()); - return addFieldToAgg(field, builder); - case "STATS": - builder = AggregationBuilders.stats(field.getAlias()); - return addFieldToAgg(field, builder); - case "EXTENDED_STATS": - builder = AggregationBuilders.extendedStats(field.getAlias()); - return addFieldToAgg(field, builder); - case "PERCENTILES": - builder = AggregationBuilders.percentiles(field.getAlias()); - addSpecificPercentiles((PercentilesAggregationBuilder) builder, 
field.getParams()); - return addFieldToAgg(field, builder); - case "TOPHITS": - return makeTopHitsAgg(field); - case "SCRIPTED_METRIC": - return scriptedMetric(field); - case "COUNT": - extendGroupMap(field, new KVValue("COUNT", parent)); - return addFieldToAgg(field, makeCountAgg(field)); - default: - throw new SqlParseException("the agg function not to define !"); - } + if (field instanceof MethodField) { + + MethodField methodField = (MethodField) field; + if (methodField.getName().equals("filter")) { + Map paramsAsMap = methodField.getParamsAsMap(); + Where where = (Where) paramsAsMap.get("where"); + return AggregationBuilders.filter( + paramsAsMap.get("alias").toString(), QueryMaker.explain(where)); + } + return makeRangeGroup(methodField); + } else { + String termName = + (Strings.isNullOrEmpty(field.getAlias())) ? field.getName() : field.getAlias(); + TermsAggregationBuilder termsBuilder = + AggregationBuilders.terms(termName).field(field.getName()); + final KVValue kvValue = new KVValue("KEY", termsBuilder); + groupMap.put(termName, kvValue); + // map the field name with KVValue if it is not yet. The use case is when alias exist, + // the termName is different with fieldName, both of them should be included in the map. + groupMap.putIfAbsent(field.getName(), kvValue); + return termsBuilder; } - - /** - * With {@link Where} Condition. - */ - public AggMaker withWhere(Where where) { - this.where = where; - return this; + } + + /** + * Create aggregation according to the SQL function. 
+ * + * @param field SQL function + * @param parent parentAggregation + * @return AggregationBuilder represents the SQL function + * @throws SqlParseException in case of unrecognized function + */ + public AggregationBuilder makeFieldAgg(MethodField field, AggregationBuilder parent) + throws SqlParseException { + extendGroupMap(field, new KVValue("FIELD", parent)); + ValuesSourceAggregationBuilder builder; + field.setAlias(fixAlias(field.getAlias())); + switch (field.getName().toUpperCase()) { + case "SUM": + builder = AggregationBuilders.sum(field.getAlias()); + return addFieldToAgg(field, builder); + case "MAX": + builder = AggregationBuilders.max(field.getAlias()); + return addFieldToAgg(field, builder); + case "MIN": + builder = AggregationBuilders.min(field.getAlias()); + return addFieldToAgg(field, builder); + case "AVG": + builder = AggregationBuilders.avg(field.getAlias()); + return addFieldToAgg(field, builder); + case "STATS": + builder = AggregationBuilders.stats(field.getAlias()); + return addFieldToAgg(field, builder); + case "EXTENDED_STATS": + builder = AggregationBuilders.extendedStats(field.getAlias()); + return addFieldToAgg(field, builder); + case "PERCENTILES": + builder = AggregationBuilders.percentiles(field.getAlias()); + addSpecificPercentiles((PercentilesAggregationBuilder) builder, field.getParams()); + return addFieldToAgg(field, builder); + case "TOPHITS": + return makeTopHitsAgg(field); + case "SCRIPTED_METRIC": + return scriptedMetric(field); + case "COUNT": + extendGroupMap(field, new KVValue("COUNT", parent)); + return addFieldToAgg(field, makeCountAgg(field)); + default: + throw new SqlParseException("the agg function not to define !"); } - - private void addSpecificPercentiles(PercentilesAggregationBuilder percentilesBuilder, List params) { - List percentiles = new ArrayList<>(); - for (KVValue kValue : params) { - if (kValue.value.getClass().equals(BigDecimal.class)) { - BigDecimal percentile = (BigDecimal) kValue.value; - 
percentiles.add(percentile.doubleValue()); - - } else if (kValue.value instanceof Integer) { - percentiles.add(((Integer) kValue.value).doubleValue()); - } - } - if (percentiles.size() > 0) { - double[] percentilesArr = new double[percentiles.size()]; - int i = 0; - for (Double percentile : percentiles) { - percentilesArr[i] = percentile; - i++; - } - percentilesBuilder.percentiles(percentilesArr); - } + } + + /** With {@link Where} Condition. */ + public AggMaker withWhere(Where where) { + this.where = where; + return this; + } + + private void addSpecificPercentiles( + PercentilesAggregationBuilder percentilesBuilder, List params) { + List percentiles = new ArrayList<>(); + for (KVValue kValue : params) { + if (kValue.value.getClass().equals(BigDecimal.class)) { + BigDecimal percentile = (BigDecimal) kValue.value; + percentiles.add(percentile.doubleValue()); + + } else if (kValue.value instanceof Integer) { + percentiles.add(((Integer) kValue.value).doubleValue()); + } } - - private String fixAlias(String alias) { - //because [ is not legal as alias - return alias.replaceAll("\\[", "(").replaceAll("\\]", ")"); + if (percentiles.size() > 0) { + double[] percentilesArr = new double[percentiles.size()]; + int i = 0; + for (Double percentile : percentiles) { + percentilesArr[i] = percentile; + i++; + } + percentilesBuilder.percentiles(percentilesArr); } + } + + private String fixAlias(String alias) { + // because [ is not legal as alias + return alias.replaceAll("\\[", "(").replaceAll("\\]", ")"); + } + + private AggregationBuilder addFieldToAgg( + MethodField field, ValuesSourceAggregationBuilder builder) throws SqlParseException { + KVValue kvValue = field.getParams().get(0); + if (kvValue.key != null && kvValue.key.equals("script")) { + if (kvValue.value instanceof MethodField) { + return builder.script( + new Script(((MethodField) kvValue.value).getParams().get(1).toString())); + } else { + return builder.script(new Script(kvValue.value.toString())); + } + + } 
else if (kvValue.key != null && kvValue.value.toString().trim().startsWith("def")) { + return builder.script(new Script(kvValue.value.toString())); + } else if (kvValue.key != null + && (kvValue.key.equals("nested") || kvValue.key.equals("reverse_nested"))) { + NestedType nestedType = (NestedType) kvValue.value; + nestedType.addBucketPath(Path.getMetricPath(builder.getName())); + + if (nestedType.isNestedField()) { + builder.field("_index"); + } else { + builder.field(nestedType.field); + } + + AggregationBuilder nestedBuilder; + + String nestedAggName = nestedType.getNestedAggName(); + + if (nestedType.isReverse()) { + if (nestedType.path != null && nestedType.path.startsWith("~")) { + String realPath = nestedType.path.substring(1); + nestedBuilder = AggregationBuilders.nested(nestedAggName, realPath); + nestedBuilder = nestedBuilder.subAggregation(builder); + return AggregationBuilders.reverseNested(nestedAggName + "_REVERSED") + .subAggregation(nestedBuilder); + } else { + ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = + AggregationBuilders.reverseNested(nestedAggName); + if (nestedType.path != null) { + reverseNestedAggregationBuilder.path(nestedType.path); + } + nestedBuilder = reverseNestedAggregationBuilder; + } + } else { + nestedBuilder = AggregationBuilders.nested(nestedAggName, nestedType.path); + } - private AggregationBuilder addFieldToAgg(MethodField field, ValuesSourceAggregationBuilder builder) - throws SqlParseException { - KVValue kvValue = field.getParams().get(0); - if (kvValue.key != null && kvValue.key.equals("script")) { - if (kvValue.value instanceof MethodField) { - return builder.script(new Script(((MethodField) kvValue.value).getParams().get(1).toString())); - } else { - return builder.script(new Script(kvValue.value.toString())); - } - - } else if (kvValue.key != null && kvValue.value.toString().trim().startsWith("def")) { - return builder.script(new Script(kvValue.value.toString())); - } else if (kvValue.key != null 
&& (kvValue.key.equals("nested") || kvValue.key.equals("reverse_nested"))) { - NestedType nestedType = (NestedType) kvValue.value; - nestedType.addBucketPath(Path.getMetricPath(builder.getName())); - - if (nestedType.isNestedField()) { - builder.field("_index"); - } else { - builder.field(nestedType.field); - } - - AggregationBuilder nestedBuilder; - - String nestedAggName = nestedType.getNestedAggName(); - - if (nestedType.isReverse()) { - if (nestedType.path != null && nestedType.path.startsWith("~")) { - String realPath = nestedType.path.substring(1); - nestedBuilder = AggregationBuilders.nested(nestedAggName, realPath); - nestedBuilder = nestedBuilder.subAggregation(builder); - return AggregationBuilders.reverseNested(nestedAggName + "_REVERSED") - .subAggregation(nestedBuilder); - } else { - ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = - AggregationBuilders.reverseNested(nestedAggName); - if (nestedType.path != null) { - reverseNestedAggregationBuilder.path(nestedType.path); - } - nestedBuilder = reverseNestedAggregationBuilder; - } - } else { - nestedBuilder = AggregationBuilders.nested(nestedAggName, nestedType.path); - } - - AggregationBuilder aggregation = nestedBuilder.subAggregation(wrapWithFilterAgg( - nestedType, - builder)); - nestedType.addBucketPath(Path.getAggPath(nestedBuilder.getName())); - return aggregation; - } else if (kvValue.key != null && (kvValue.key.equals("children"))) { - ChildrenType childrenType = (ChildrenType) kvValue.value; - - builder.field(childrenType.field); + AggregationBuilder aggregation = + nestedBuilder.subAggregation(wrapWithFilterAgg(nestedType, builder)); + nestedType.addBucketPath(Path.getAggPath(nestedBuilder.getName())); + return aggregation; + } else if (kvValue.key != null && (kvValue.key.equals("children"))) { + ChildrenType childrenType = (ChildrenType) kvValue.value; - AggregationBuilder childrenBuilder; + builder.field(childrenType.field); - String childrenAggName = childrenType.field + 
"@CHILDREN"; + AggregationBuilder childrenBuilder; - childrenBuilder = JoinAggregationBuilders.children(childrenAggName, childrenType.childType); + String childrenAggName = childrenType.field + "@CHILDREN"; - return childrenBuilder; - } + childrenBuilder = JoinAggregationBuilders.children(childrenAggName, childrenType.childType); - return builder.field(kvValue.toString()); + return childrenBuilder; } - private AggregationBuilder makeRangeGroup(MethodField field) throws SqlParseException { - switch (field.getName().toLowerCase()) { - case "range": - return rangeBuilder(field); - case "date_histogram": - return dateHistogram(field); - case "date_range": - case "month": - return dateRange(field); - case "histogram": - return histogram(field); - case "geohash_grid": - return geohashGrid(field); - case "geo_bounds": - return geoBounds(field); - case "terms": - return termsAgg(field); - default: - throw new SqlParseException("can define this method " + field); - } - + return builder.field(kvValue.toString()); + } + + private AggregationBuilder makeRangeGroup(MethodField field) throws SqlParseException { + switch (field.getName().toLowerCase()) { + case "range": + return rangeBuilder(field); + case "date_histogram": + return dateHistogram(field); + case "date_range": + case "month": + return dateRange(field); + case "histogram": + return histogram(field); + case "geohash_grid": + return geohashGrid(field); + case "geo_bounds": + return geoBounds(field); + case "terms": + return termsAgg(field); + default: + throw new SqlParseException("can define this method " + field); } - - private AggregationBuilder geoBounds(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - GeoBoundsAggregationBuilder boundsBuilder = new GeoBoundsAggregationBuilder(aggName); - String value; - for (KVValue kv : field.getParams()) { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "field": - boundsBuilder.field(value); - break; 
- case "wrap_longitude": - boundsBuilder.wrapLongitude(Boolean.getBoolean(value)); - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("geo_bounds err or not define field " + kv.toString()); - } - } - return boundsBuilder; + } + + private AggregationBuilder geoBounds(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + GeoBoundsAggregationBuilder boundsBuilder = new GeoBoundsAggregationBuilder(aggName); + String value; + for (KVValue kv : field.getParams()) { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "field": + boundsBuilder.field(value); + break; + case "wrap_longitude": + boundsBuilder.wrapLongitude(Boolean.getBoolean(value)); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("geo_bounds err or not define field " + kv.toString()); + } } - - private AggregationBuilder termsAgg(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - TermsAggregationBuilder terms = AggregationBuilders.terms(aggName); - String value; - IncludeExclude include = null, exclude = null; - for (KVValue kv : field.getParams()) { - if (kv.value.toString().contains("doc[")) { - String script = kv.value + "; return " + kv.key; - terms.script(new Script(script)); + return boundsBuilder; + } + + private AggregationBuilder termsAgg(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + TermsAggregationBuilder terms = AggregationBuilders.terms(aggName); + String value; + IncludeExclude include = null, exclude = null; + for (KVValue kv : field.getParams()) { + if (kv.value.toString().contains("doc[")) { + String script = kv.value + "; return " + kv.key; + terms.script(new Script(script)); + } else { + value = kv.value.toString(); + switch 
(kv.key.toLowerCase()) { + case "field": + terms.field(value); + break; + case "size": + terms.size(Integer.parseInt(value)); + break; + case "shard_size": + terms.shardSize(Integer.parseInt(value)); + break; + case "min_doc_count": + terms.minDocCount(Integer.parseInt(value)); + break; + case "missing": + terms.missing(value); + break; + case "order": + if ("asc".equalsIgnoreCase(value)) { + terms.order(BucketOrder.key(true)); + } else if ("desc".equalsIgnoreCase(value)) { + terms.order(BucketOrder.key(false)); } else { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "field": - terms.field(value); - break; - case "size": - terms.size(Integer.parseInt(value)); - break; - case "shard_size": - terms.shardSize(Integer.parseInt(value)); - break; - case "min_doc_count": - terms.minDocCount(Integer.parseInt(value)); - break; - case "missing": - terms.missing(value); - break; - case "order": - if ("asc".equalsIgnoreCase(value)) { - terms.order(BucketOrder.key(true)); - } else if ("desc".equalsIgnoreCase(value)) { - terms.order(BucketOrder.key(false)); - } else { - List orderElements = new ArrayList<>(); - try (JsonXContentParser parser = new JsonXContentParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, new JsonFactory().createParser(value))) { - XContentParser.Token currentToken = parser.nextToken(); - if (currentToken == XContentParser.Token.START_OBJECT) { - orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); - } else if (currentToken == XContentParser.Token.START_ARRAY) { - for (currentToken = parser.nextToken(); - currentToken != XContentParser.Token.END_ARRAY; - currentToken = parser.nextToken()) { - if (currentToken == XContentParser.Token.START_OBJECT) { - orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); - } else { - throw new ParsingException(parser.getTokenLocation(), - "Invalid token in order array"); - } - } - } - } catch (IOException e) { - throw new SqlParseException("couldn't parse 
order: " + e.getMessage()); - } - terms.order(orderElements); - } - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - case "execution_hint": - terms.executionHint(value); - break; - case "include": - try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, value)) { - parser.nextToken(); - include = IncludeExclude.parseInclude(parser); - } catch (IOException e) { - throw new SqlParseException("parse include[" + value + "] error: " + e.getMessage()); - } - break; - case "exclude": - try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, value)) { - parser.nextToken(); - exclude = IncludeExclude.parseExclude(parser); - } catch (IOException e) { - throw new SqlParseException("parse exclude[" + value + "] error: " + e.getMessage()); - } - break; - default: - throw new SqlParseException("terms aggregation err or not define field " + kv.toString()); + List orderElements = new ArrayList<>(); + try (JsonXContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(value))) { + XContentParser.Token currentToken = parser.nextToken(); + if (currentToken == XContentParser.Token.START_OBJECT) { + orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); + } else if (currentToken == XContentParser.Token.START_ARRAY) { + for (currentToken = parser.nextToken(); + currentToken != XContentParser.Token.END_ARRAY; + currentToken = parser.nextToken()) { + if (currentToken == XContentParser.Token.START_OBJECT) { + orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); + } else { + throw new ParsingException( + parser.getTokenLocation(), "Invalid token in order array"); + } + } } + } catch (IOException e) { + throw new SqlParseException("couldn't parse order: " + e.getMessage()); + } 
+ terms.order(orderElements); } - } - terms.includeExclude(IncludeExclude.merge(include, exclude)); - return terms; - } - - private AbstractAggregationBuilder scriptedMetric(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - ScriptedMetricAggregationBuilder scriptedMetricBuilder = AggregationBuilders.scriptedMetric(aggName); - Map scriptedMetricParams = field.getParamsAsMap(); - if (!scriptedMetricParams.containsKey("map_script") && !scriptedMetricParams.containsKey("map_script_id") - && !scriptedMetricParams.containsKey("map_script_file")) { - throw new SqlParseException( - "scripted metric parameters must contain map_script/map_script_id/map_script_file parameter"); - } - HashMap scriptAdditionalParams = new HashMap<>(); - HashMap reduceScriptAdditionalParams = new HashMap<>(); - for (Map.Entry param : scriptedMetricParams.entrySet()) { - String paramValue = param.getValue().toString(); - if (param.getKey().startsWith("@")) { - if (param.getKey().startsWith("@reduce_")) { - reduceScriptAdditionalParams.put(param.getKey().replace("@reduce_", ""), - param.getValue()); - } else { - scriptAdditionalParams.put(param.getKey().replace("@", ""), param.getValue()); - } - continue; + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + case "execution_hint": + terms.executionHint(value); + break; + case "include": + try (XContentParser parser = + JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, value)) { + parser.nextToken(); + include = IncludeExclude.parseInclude(parser); + } catch (IOException e) { + throw new SqlParseException("parse include[" + value + "] error: " + e.getMessage()); } - - switch (param.getKey().toLowerCase()) { - case "map_script": - scriptedMetricBuilder.mapScript(new Script(paramValue)); - break; - case "map_script_id": - scriptedMetricBuilder.mapScript(new Script(ScriptType.STORED, 
Script.DEFAULT_SCRIPT_LANG, - paramValue, new HashMap<>())); - break; - case "init_script": - scriptedMetricBuilder.initScript(new Script(paramValue)); - break; - case "init_script_id": - scriptedMetricBuilder.initScript(new Script(ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, - paramValue, new HashMap<>())); - break; - case "combine_script": - scriptedMetricBuilder.combineScript(new Script(paramValue)); - break; - case "combine_script_id": - scriptedMetricBuilder.combineScript(new Script(ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, - paramValue, new HashMap<>())); - break; - case "reduce_script": - scriptedMetricBuilder.reduceScript(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, - paramValue, reduceScriptAdditionalParams)); - break; - case "reduce_script_id": - scriptedMetricBuilder.reduceScript(new Script(ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, - paramValue, reduceScriptAdditionalParams)); - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("scripted_metric err or not define field " + param.getKey()); + break; + case "exclude": + try (XContentParser parser = + JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, value)) { + parser.nextToken(); + exclude = IncludeExclude.parseExclude(parser); + } catch (IOException e) { + throw new SqlParseException("parse exclude[" + value + "] error: " + e.getMessage()); } + break; + default: + throw new SqlParseException( + "terms aggregation err or not define field " + kv.toString()); } - if (scriptAdditionalParams.size() > 0) { - scriptAdditionalParams.put("_agg", new HashMap<>()); - scriptedMetricBuilder.params(scriptAdditionalParams); - } - - return scriptedMetricBuilder; + } } - - private AggregationBuilder geohashGrid(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - GeoGridAggregationBuilder geoHashGrid = new 
GeoHashGridAggregationBuilder(aggName); - String value; - for (KVValue kv : field.getParams()) { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "precision": - geoHashGrid.precision(Integer.parseInt(value)); - break; - case "field": - geoHashGrid.field(value); - break; - case "size": - geoHashGrid.size(Integer.parseInt(value)); - break; - case "shard_size": - geoHashGrid.shardSize(Integer.parseInt(value)); - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("geohash grid err or not define field " + kv.toString()); - } - } - return geoHashGrid; + terms.includeExclude(IncludeExclude.merge(include, exclude)); + return terms; + } + + private AbstractAggregationBuilder scriptedMetric(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + ScriptedMetricAggregationBuilder scriptedMetricBuilder = + AggregationBuilders.scriptedMetric(aggName); + Map scriptedMetricParams = field.getParamsAsMap(); + if (!scriptedMetricParams.containsKey("map_script") + && !scriptedMetricParams.containsKey("map_script_id") + && !scriptedMetricParams.containsKey("map_script_file")) { + throw new SqlParseException( + "scripted metric parameters must contain map_script/map_script_id/map_script_file" + + " parameter"); } - - private static final String TIME_FARMAT = "yyyy-MM-dd HH:mm:ss"; - - private ValuesSourceAggregationBuilder dateRange(MethodField field) { - String alias = gettAggNameFromParamsOrAlias(field); - DateRangeAggregationBuilder dateRange = AggregationBuilders.dateRange(alias).format(TIME_FARMAT); - - String value; - List ranges = new ArrayList<>(); - for (KVValue kv : field.getParams()) { - value = kv.value.toString(); - if ("field".equals(kv.key)) { - dateRange.field(value); - } else if ("format".equals(kv.key)) { - dateRange.format(value); - } else if ("time_zone".equals(kv.key)) { - dateRange.timeZone(ZoneOffset.of(value)); 
- } else if ("from".equals(kv.key)) { - dateRange.addUnboundedFrom(kv.value.toString()); - } else if ("to".equals(kv.key)) { - dateRange.addUnboundedTo(kv.value.toString()); - } else if (!"alias".equals(kv.key) && !"nested".equals(kv.key) && !"children".equals(kv.key)) { - ranges.add(value); - } - } - - for (int i = 1; i < ranges.size(); i++) { - dateRange.addRange(ranges.get(i - 1), ranges.get(i)); + HashMap scriptAdditionalParams = new HashMap<>(); + HashMap reduceScriptAdditionalParams = new HashMap<>(); + for (Map.Entry param : scriptedMetricParams.entrySet()) { + String paramValue = param.getValue().toString(); + if (param.getKey().startsWith("@")) { + if (param.getKey().startsWith("@reduce_")) { + reduceScriptAdditionalParams.put( + param.getKey().replace("@reduce_", ""), param.getValue()); + } else { + scriptAdditionalParams.put(param.getKey().replace("@", ""), param.getValue()); } + continue; + } + + switch (param.getKey().toLowerCase()) { + case "map_script": + scriptedMetricBuilder.mapScript(new Script(paramValue)); + break; + case "map_script_id": + scriptedMetricBuilder.mapScript( + new Script( + ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, paramValue, new HashMap<>())); + break; + case "init_script": + scriptedMetricBuilder.initScript(new Script(paramValue)); + break; + case "init_script_id": + scriptedMetricBuilder.initScript( + new Script( + ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, paramValue, new HashMap<>())); + break; + case "combine_script": + scriptedMetricBuilder.combineScript(new Script(paramValue)); + break; + case "combine_script_id": + scriptedMetricBuilder.combineScript( + new Script( + ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, paramValue, new HashMap<>())); + break; + case "reduce_script": + scriptedMetricBuilder.reduceScript( + new Script( + ScriptType.INLINE, + Script.DEFAULT_SCRIPT_LANG, + paramValue, + reduceScriptAdditionalParams)); + break; + case "reduce_script_id": + scriptedMetricBuilder.reduceScript( + new Script( + 
ScriptType.STORED, + Script.DEFAULT_SCRIPT_LANG, + paramValue, + reduceScriptAdditionalParams)); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("scripted_metric err or not define field " + param.getKey()); + } + } + if (scriptAdditionalParams.size() > 0) { + scriptAdditionalParams.put("_agg", new HashMap<>()); + scriptedMetricBuilder.params(scriptAdditionalParams); + } - return dateRange; + return scriptedMetricBuilder; + } + + private AggregationBuilder geohashGrid(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + GeoGridAggregationBuilder geoHashGrid = new GeoHashGridAggregationBuilder(aggName); + String value; + for (KVValue kv : field.getParams()) { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "precision": + geoHashGrid.precision(Integer.parseInt(value)); + break; + case "field": + geoHashGrid.field(value); + break; + case "size": + geoHashGrid.size(Integer.parseInt(value)); + break; + case "shard_size": + geoHashGrid.shardSize(Integer.parseInt(value)); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("geohash grid err or not define field " + kv.toString()); + } + } + return geoHashGrid; + } + + private static final String TIME_FARMAT = "yyyy-MM-dd HH:mm:ss"; + + private ValuesSourceAggregationBuilder dateRange(MethodField field) { + String alias = gettAggNameFromParamsOrAlias(field); + DateRangeAggregationBuilder dateRange = + AggregationBuilders.dateRange(alias).format(TIME_FARMAT); + + String value; + List ranges = new ArrayList<>(); + for (KVValue kv : field.getParams()) { + value = kv.value.toString(); + if ("field".equals(kv.key)) { + dateRange.field(value); + } else if ("format".equals(kv.key)) { + dateRange.format(value); + } else if ("time_zone".equals(kv.key)) { + 
dateRange.timeZone(ZoneOffset.of(value)); + } else if ("from".equals(kv.key)) { + dateRange.addUnboundedFrom(kv.value.toString()); + } else if ("to".equals(kv.key)) { + dateRange.addUnboundedTo(kv.value.toString()); + } else if (!"alias".equals(kv.key) + && !"nested".equals(kv.key) + && !"children".equals(kv.key)) { + ranges.add(value); + } } - /** - * - * - * @param field - * @return - * @throws SqlParseException - */ - private DateHistogramAggregationBuilder dateHistogram(MethodField field) throws SqlParseException { - String alias = gettAggNameFromParamsOrAlias(field); - DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(alias).format(TIME_FARMAT); - String value; - for (KVValue kv : field.getParams()) { - if (kv.value.toString().contains("doc[")) { - String script = kv.value + "; return " + kv.key; - dateHistogram.script(new Script(script)); - } else { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "interval": - dateHistogram.dateHistogramInterval(new DateHistogramInterval(kv.value.toString())); - break; - case "fixed_interval": - dateHistogram.fixedInterval(new DateHistogramInterval(kv.value.toString())); - break; - case "field": - dateHistogram.field(value); - break; - case "format": - dateHistogram.format(value); - break; - case "time_zone": - dateHistogram.timeZone(ZoneOffset.of(value)); - break; - case "min_doc_count": - dateHistogram.minDocCount(Long.parseLong(value)); - break; - case "order": - dateHistogram.order("desc".equalsIgnoreCase(value) ? 
BucketOrder.key(false) : - BucketOrder.key(true)); - break; - case "extended_bounds": - String[] bounds = value.split(":"); - if (bounds.length == 2) { - dateHistogram.extendedBounds(new LongBounds(bounds[0], bounds[1])); - } - break; - - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("date range err or not define field " + kv.toString()); - } - } - } - return dateHistogram; + for (int i = 1; i < ranges.size(); i++) { + dateRange.addRange(ranges.get(i - 1), ranges.get(i)); } - private String gettAggNameFromParamsOrAlias(MethodField field) { - String alias = field.getAlias(); - for (KVValue kv : field.getParams()) { - if (kv.key != null && kv.key.equals("alias")) { - alias = kv.value.toString(); + return dateRange; + } + + /** + * @param field + * @return + * @throws SqlParseException + */ + private DateHistogramAggregationBuilder dateHistogram(MethodField field) + throws SqlParseException { + String alias = gettAggNameFromParamsOrAlias(field); + DateHistogramAggregationBuilder dateHistogram = + AggregationBuilders.dateHistogram(alias).format(TIME_FARMAT); + String value; + for (KVValue kv : field.getParams()) { + if (kv.value.toString().contains("doc[")) { + String script = kv.value + "; return " + kv.key; + dateHistogram.script(new Script(script)); + } else { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "interval": + dateHistogram.dateHistogramInterval(new DateHistogramInterval(kv.value.toString())); + break; + case "fixed_interval": + dateHistogram.fixedInterval(new DateHistogramInterval(kv.value.toString())); + break; + case "field": + dateHistogram.field(value); + break; + case "format": + dateHistogram.format(value); + break; + case "time_zone": + dateHistogram.timeZone(ZoneOffset.of(value)); + break; + case "min_doc_count": + dateHistogram.minDocCount(Long.parseLong(value)); + break; + case "order": + dateHistogram.order( + "desc".equalsIgnoreCase(value) ? 
BucketOrder.key(false) : BucketOrder.key(true)); + break; + case "extended_bounds": + String[] bounds = value.split(":"); + if (bounds.length == 2) { + dateHistogram.extendedBounds(new LongBounds(bounds[0], bounds[1])); } + break; + + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("date range err or not define field " + kv.toString()); } - return alias; + } } - - private HistogramAggregationBuilder histogram(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - HistogramAggregationBuilder histogram = AggregationBuilders.histogram(aggName); - String value; - for (KVValue kv : field.getParams()) { - if (kv.value.toString().contains("doc[")) { - String script = kv.value + "; return " + kv.key; - histogram.script(new Script(script)); - } else { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "interval": - histogram.interval(Long.parseLong(value)); - break; - case "field": - histogram.field(value); - break; - case "min_doc_count": - histogram.minDocCount(Long.parseLong(value)); - break; - case "extended_bounds": - String[] bounds = value.split(":"); - if (bounds.length == 2) { - histogram.extendedBounds(Long.valueOf(bounds[0]), Long.valueOf(bounds[1])); - } - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - case "order": - final BucketOrder order; - switch (value) { - case "key_desc": - order = BucketOrder.key(false); - break; - case "count_asc": - order = BucketOrder.count(true); - break; - case "count_desc": - order = BucketOrder.count(false); - break; - case "key_asc": - default: - order = BucketOrder.key(true); - break; - } - histogram.order(order); - break; - default: - throw new SqlParseException("histogram err or not define field " + kv.toString()); - } + return dateHistogram; + } + + private String gettAggNameFromParamsOrAlias(MethodField field) { + String alias = 
field.getAlias(); + for (KVValue kv : field.getParams()) { + if (kv.key != null && kv.key.equals("alias")) { + alias = kv.value.toString(); + } + } + return alias; + } + + private HistogramAggregationBuilder histogram(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + HistogramAggregationBuilder histogram = AggregationBuilders.histogram(aggName); + String value; + for (KVValue kv : field.getParams()) { + if (kv.value.toString().contains("doc[")) { + String script = kv.value + "; return " + kv.key; + histogram.script(new Script(script)); + } else { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "interval": + histogram.interval(Long.parseLong(value)); + break; + case "field": + histogram.field(value); + break; + case "min_doc_count": + histogram.minDocCount(Long.parseLong(value)); + break; + case "extended_bounds": + String[] bounds = value.split(":"); + if (bounds.length == 2) { + histogram.extendedBounds(Long.valueOf(bounds[0]), Long.valueOf(bounds[1])); + } + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + case "order": + final BucketOrder order; + switch (value) { + case "key_desc": + order = BucketOrder.key(false); + break; + case "count_asc": + order = BucketOrder.count(true); + break; + case "count_desc": + order = BucketOrder.count(false); + break; + case "key_asc": + default: + order = BucketOrder.key(true); + break; } + histogram.order(order); + break; + default: + throw new SqlParseException("histogram err or not define field " + kv.toString()); } - return histogram; + } } + return histogram; + } - /** - * - * - * @param field - * @return - */ - private RangeAggregationBuilder rangeBuilder(MethodField field) { + /** + * @param field + * @return + */ + private RangeAggregationBuilder rangeBuilder(MethodField field) { - // ignore alias param - LinkedList params = field.getParams().stream().filter(kv -> !"alias".equals(kv.key)) - 
.collect(Collectors.toCollection(LinkedList::new)); + // ignore alias param + LinkedList params = + field.getParams().stream() + .filter(kv -> !"alias".equals(kv.key)) + .collect(Collectors.toCollection(LinkedList::new)); - String fieldName = params.poll().toString(); + String fieldName = params.poll().toString(); - double[] ds = Util.KV2DoubleArr(params); + double[] ds = Util.KV2DoubleArr(params); - RangeAggregationBuilder range = AggregationBuilders.range(field.getAlias()).field(fieldName); + RangeAggregationBuilder range = AggregationBuilders.range(field.getAlias()).field(fieldName); - for (int i = 1; i < ds.length; i++) { - range.addRange(ds[i - 1], ds[i]); - } - - return range; + for (int i = 1; i < ds.length; i++) { + range.addRange(ds[i - 1], ds[i]); } + return range; + } + + /** + * Create count aggregation. + * + * @param field The count function + * @return AggregationBuilder use to count result + */ + private ValuesSourceAggregationBuilder makeCountAgg(MethodField field) { + + // Cardinality is approximate DISTINCT. + if (SQLAggregateOption.DISTINCT.equals(field.getOption())) { + + if (field.getParams().size() == 1) { + return AggregationBuilders.cardinality(field.getAlias()) + .field(field.getParams().get(0).value.toString()); + } else { + Integer precision_threshold = (Integer) (field.getParams().get(1).value); + return AggregationBuilders.cardinality(field.getAlias()) + .precisionThreshold(precision_threshold) + .field(field.getParams().get(0).value.toString()); + } + } - /** - * Create count aggregation. - * - * @param field The count function - * @return AggregationBuilder use to count result - */ - private ValuesSourceAggregationBuilder makeCountAgg(MethodField field) { - - // Cardinality is approximate DISTINCT. 
- if (SQLAggregateOption.DISTINCT.equals(field.getOption())) { - - if (field.getParams().size() == 1) { - return AggregationBuilders.cardinality(field.getAlias()).field(field.getParams().get(0).value - .toString()); - } else { - Integer precision_threshold = (Integer) (field.getParams().get(1).value); - return AggregationBuilders.cardinality(field.getAlias()).precisionThreshold(precision_threshold) - .field(field.getParams().get(0).value.toString()); - } - - } - - String fieldName = field.getParams().get(0).value.toString(); + String fieldName = field.getParams().get(0).value.toString(); - // In case of count(*) we use '_index' as field parameter to count all documents - if ("*".equals(fieldName)) { - KVValue kvValue = new KVValue(null, "_index"); - field.getParams().set(0, kvValue); - return AggregationBuilders.count(field.getAlias()).field(kvValue.toString()); - } else { - return AggregationBuilders.count(field.getAlias()).field(fieldName); - } + // In case of count(*) we use '_index' as field parameter to count all documents + if ("*".equals(fieldName)) { + KVValue kvValue = new KVValue(null, "_index"); + field.getParams().set(0, kvValue); + return AggregationBuilders.count(field.getAlias()).field(kvValue.toString()); + } else { + return AggregationBuilders.count(field.getAlias()).field(fieldName); } - - /** - * TOPHITS - * - * @param field - * @return - */ - private AbstractAggregationBuilder makeTopHitsAgg(MethodField field) { - String alias = gettAggNameFromParamsOrAlias(field); - TopHitsAggregationBuilder topHits = AggregationBuilders.topHits(alias); - List params = field.getParams(); - String[] include = null; - String[] exclude = null; - for (KVValue kv : params) { - switch (kv.key) { - case "from": - topHits.from((int) kv.value); - break; - case "size": - topHits.size((int) kv.value); - break; - case "include": - include = kv.value.toString().split(","); - break; - case "exclude": - exclude = kv.value.toString().split(","); - break; - case "alias": - case 
"nested": - case "reverse_nested": - case "children": - break; - default: - topHits.sort(kv.key, SortOrder.valueOf(kv.value.toString().toUpperCase())); - break; - } - } - if (include != null || exclude != null) { - topHits.fetchSource(include, exclude); - } - return topHits; + } + + /** + * TOPHITS + * + * @param field + * @return + */ + private AbstractAggregationBuilder makeTopHitsAgg(MethodField field) { + String alias = gettAggNameFromParamsOrAlias(field); + TopHitsAggregationBuilder topHits = AggregationBuilders.topHits(alias); + List params = field.getParams(); + String[] include = null; + String[] exclude = null; + for (KVValue kv : params) { + switch (kv.key) { + case "from": + topHits.from((int) kv.value); + break; + case "size": + topHits.size((int) kv.value); + break; + case "include": + include = kv.value.toString().split(","); + break; + case "exclude": + exclude = kv.value.toString().split(","); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + topHits.sort(kv.key, SortOrder.valueOf(kv.value.toString().toUpperCase())); + break; + } } - - public Map getGroupMap() { - return this.groupMap; + if (include != null || exclude != null) { + topHits.fetchSource(include, exclude); } - - /** - * Wrap the Metric Aggregation with Filter Aggregation if necessary. - * The Filter Aggregation condition is constructed from the nested condition in where clause. - */ - private AggregationBuilder wrapWithFilterAgg(NestedType nestedType, ValuesSourceAggregationBuilder builder) - throws SqlParseException { - if (where != null && where.getWheres() != null) { - List nestedConditionList = where.getWheres().stream() - .filter(condition -> condition instanceof Condition) - .map(condition -> (Condition) condition) - .filter(condition -> condition.isNestedComplex() - || nestedType.path.equalsIgnoreCase(condition.getNestedPath())) - // ignore the OR condition on nested field. 
- .filter(condition -> CONN.AND.equals(condition.getConn())) - .collect(Collectors.toList()); - if (!nestedConditionList.isEmpty()) { - Where filterWhere = new Where(where.getConn()); - nestedConditionList.forEach(condition -> { - if (condition.isNestedComplex()) { - ((Where) condition.getValue()).getWheres().forEach(filterWhere::addWhere); - } else { - // Since the filter condition is used inside Nested Aggregation,remove the nested attribute. - condition.setNested(false); - condition.setNestedPath(""); - filterWhere.addWhere(condition); - } - }); - FilterAggregationBuilder filterAgg = AggregationBuilders.filter( - nestedType.getFilterAggName(), - QueryMaker.explain(filterWhere)); - nestedType.addBucketPath(Path.getAggPath(filterAgg.getName())); - return filterAgg.subAggregation(builder); - } - } - return builder; + return topHits; + } + + public Map getGroupMap() { + return this.groupMap; + } + + /** + * Wrap the Metric Aggregation with Filter Aggregation if necessary. The Filter Aggregation + * condition is constructed from the nested condition in where clause. + */ + private AggregationBuilder wrapWithFilterAgg( + NestedType nestedType, ValuesSourceAggregationBuilder builder) throws SqlParseException { + if (where != null && where.getWheres() != null) { + List nestedConditionList = + where.getWheres().stream() + .filter(condition -> condition instanceof Condition) + .map(condition -> (Condition) condition) + .filter( + condition -> + condition.isNestedComplex() + || nestedType.path.equalsIgnoreCase(condition.getNestedPath())) + // ignore the OR condition on nested field. 
+ .filter(condition -> CONN.AND.equals(condition.getConn())) + .collect(Collectors.toList()); + if (!nestedConditionList.isEmpty()) { + Where filterWhere = new Where(where.getConn()); + nestedConditionList.forEach( + condition -> { + if (condition.isNestedComplex()) { + ((Where) condition.getValue()).getWheres().forEach(filterWhere::addWhere); + } else { + // Since the filter condition is used inside Nested Aggregation,remove the nested + // attribute. + condition.setNested(false); + condition.setNestedPath(""); + filterWhere.addWhere(condition); + } + }); + FilterAggregationBuilder filterAgg = + AggregationBuilders.filter( + nestedType.getFilterAggName(), QueryMaker.explain(filterWhere)); + nestedType.addBucketPath(Path.getAggPath(filterAgg.getName())); + return filterAgg.subAggregation(builder); + } } - - /** - * The groupMap is used when parsing order by to find out the corresponding field in aggregation. - * There are two cases. - * 1) using alias in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age ORDER BY c - * 2) using full name in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age ORDER BY COUNT(*) - * Then, the groupMap should support these two cases by maintain the mapping of - * {alias, value} and {full_name, value} - */ - private void extendGroupMap(Field field, KVValue value) { - groupMap.put(field.toString(), value); - if (!StringUtils.isEmpty(field.getAlias())) { - groupMap.putIfAbsent(field.getAlias(), value); - } + return builder; + } + + /** + * The groupMap is used when parsing order by to find out the corresponding field in aggregation. + * There are two cases. + * + *

    + *
  1. using alias in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age ORDER BY c + *
  2. using full name in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age ORDER BY + * COUNT(*) + *
+ * + * Then, the groupMap should support these two cases by maintain the mapping of {alias, value} and + * {full_name, value} + */ + private void extendGroupMap(Field field, KVValue value) { + groupMap.put(field.toString(), value); + if (!StringUtils.isEmpty(field.getAlias())) { + groupMap.putIfAbsent(field.getAlias(), value); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java index 01a0e78484..a8fb7cc53c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; @@ -17,63 +16,63 @@ import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; import org.opensearch.sql.legacy.query.planner.physical.node.scroll.PhysicalScroll; -/** - * The definition of QueryPlanner which return the {@link BindingTuple} as result. - */ +/** The definition of QueryPlanner which return the {@link BindingTuple} as result. */ public class BindingTupleQueryPlanner { - private PhysicalOperator physicalOperator; - @Getter - private List columnNodes; - - public BindingTupleQueryPlanner(Client client, SQLQueryExpr sqlExpr, ColumnTypeProvider columnTypeProvider) { - SQLToOperatorConverter converter = new SQLToOperatorConverter(client, columnTypeProvider); - sqlExpr.accept(converter); - this.physicalOperator = converter.getPhysicalOperator(); - this.columnNodes = converter.getColumnNodes(); - } + private PhysicalOperator physicalOperator; + @Getter private List columnNodes; - /** - * Execute the QueryPlanner. - * @return list of {@link BindingTuple}. 
- */ - public List execute() { - PhysicalOperator op = physicalOperator; - List tuples = new ArrayList<>(); - try { - op.open(null); - } catch (Exception e) { - throw new RuntimeException(e); - } + public BindingTupleQueryPlanner( + Client client, SQLQueryExpr sqlExpr, ColumnTypeProvider columnTypeProvider) { + SQLToOperatorConverter converter = new SQLToOperatorConverter(client, columnTypeProvider); + sqlExpr.accept(converter); + this.physicalOperator = converter.getPhysicalOperator(); + this.columnNodes = converter.getColumnNodes(); + } - while (op.hasNext()) { - tuples.add(op.next().data()); - } - return tuples; + /** + * Execute the QueryPlanner. + * + * @return list of {@link BindingTuple}. + */ + public List execute() { + PhysicalOperator op = physicalOperator; + List tuples = new ArrayList<>(); + try { + op.open(null); + } catch (Exception e) { + throw new RuntimeException(e); } - /** - * Explain the physical execution plan. - * @return execution plan. - */ - public String explain() { - Explanation explanation = new Explanation(); - physicalOperator.accept(explanation); - return explanation.explain(); + while (op.hasNext()) { + tuples.add(op.next().data()); } + return tuples; + } + + /** + * Explain the physical execution plan. + * + * @return execution plan. 
+ */ + public String explain() { + Explanation explanation = new Explanation(); + physicalOperator.accept(explanation); + return explanation.explain(); + } - private static class Explanation implements PlanNode.Visitor { - private String explain; + private static class Explanation implements PlanNode.Visitor { + private String explain; - public String explain() { - return explain; - } + public String explain() { + return explain; + } - @Override - public boolean visit(PlanNode planNode) { - if (planNode instanceof PhysicalScroll) { - explain = planNode.toString(); - } - return true; - } + @Override + public boolean visit(PlanNode planNode) { + if (planNode instanceof PhysicalScroll) { + explain = planNode.toString(); + } + return true; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java index 753d5ac001..9dd969fb83 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import com.google.common.base.Strings; @@ -14,20 +13,18 @@ import org.opensearch.sql.legacy.executor.format.Schema; import org.opensearch.sql.legacy.expression.core.Expression; -/** - * The definition of column node. - */ +/** The definition of column node. */ @Builder @Setter @Getter @ToString public class ColumnNode { - private String name; - private String alias; - private Schema.Type type; - private Expression expr; + private String name; + private String alias; + private Schema.Type type; + private Expression expr; - public String columnName() { - return Strings.isNullOrEmpty(alias) ? name : alias; - } + public String columnName() { + return Strings.isNullOrEmpty(alias) ? 
name : alias; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java index 6e04c674cb..304a16756b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java @@ -3,156 +3,134 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import org.opensearch.sql.legacy.query.planner.resource.blocksize.AdaptiveBlockSize; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize.FixedBlockSize; -/** - * Query planner configuration - */ +/** Query planner configuration */ public class Config { - public static final int DEFAULT_BLOCK_SIZE = 10000; - public static final int DEFAULT_SCROLL_PAGE_SIZE = 10000; - public static final int DEFAULT_CIRCUIT_BREAK_LIMIT = 85; - public static final double[] DEFAULT_BACK_OFF_RETRY_INTERVALS = {4, 8 + 4, 16 + 4}; - public static final int DEFAULT_TIME_OUT = 60; - - /** - * Block size for join algorithm - */ - private BlockSize blockSize = new FixedBlockSize(DEFAULT_BLOCK_SIZE); - - /** - * Page size for scroll on each index - */ - private Integer[] scrollPageSizes = {DEFAULT_SCROLL_PAGE_SIZE, DEFAULT_SCROLL_PAGE_SIZE}; - - /** - * Circuit breaker trigger limit (percentage) - */ - private Integer circuitBreakLimit = DEFAULT_CIRCUIT_BREAK_LIMIT; - - /** - * Intervals for back off retry - */ - private double[] backOffRetryIntervals = DEFAULT_BACK_OFF_RETRY_INTERVALS; - - /** - * Total number of rows in final result specified by LIMIT - */ - private int totalLimit; - - /** - * Number of rows fetched from each table specified by JOIN_TABLES_LIMIT hint - */ - private int tableLimit1; - private int tableLimit2; - - /** - * Push down column values in ON of first table to query 
against second table - */ - private boolean isUseTermsFilterOptimization = false; - - /** - * Total time out (seconds) for the execution - */ - private int timeout = DEFAULT_TIME_OUT; - - - public BlockSize blockSize() { - return blockSize; - } + public static final int DEFAULT_BLOCK_SIZE = 10000; + public static final int DEFAULT_SCROLL_PAGE_SIZE = 10000; + public static final int DEFAULT_CIRCUIT_BREAK_LIMIT = 85; + public static final double[] DEFAULT_BACK_OFF_RETRY_INTERVALS = {4, 8 + 4, 16 + 4}; + public static final int DEFAULT_TIME_OUT = 60; - public void configureBlockSize(Object[] params) { - if (params.length > 0) { - Integer size = (Integer) params[0]; - if (size > 0) { - blockSize = new FixedBlockSize(size); - } else { - blockSize = new AdaptiveBlockSize(0); - } - } - } + /** Block size for join algorithm */ + private BlockSize blockSize = new FixedBlockSize(DEFAULT_BLOCK_SIZE); - public Integer[] scrollPageSize() { - return scrollPageSizes; - } + /** Page size for scroll on each index */ + private Integer[] scrollPageSizes = {DEFAULT_SCROLL_PAGE_SIZE, DEFAULT_SCROLL_PAGE_SIZE}; - public void configureScrollPageSize(Object[] params) { - if (params.length == 1) { - scrollPageSizes = new Integer[]{ - (Integer) params[0], - (Integer) params[0] - }; - } else if (params.length >= 2) { - scrollPageSizes = (Integer[]) params; - } - } + /** Circuit breaker trigger limit (percentage) */ + private Integer circuitBreakLimit = DEFAULT_CIRCUIT_BREAK_LIMIT; - public int circuitBreakLimit() { - return circuitBreakLimit; - } + /** Intervals for back off retry */ + private double[] backOffRetryIntervals = DEFAULT_BACK_OFF_RETRY_INTERVALS; - public void configureCircuitBreakLimit(Object[] params) { - if (params.length > 0) { - circuitBreakLimit = (Integer) params[0]; - } - } + /** Total number of rows in final result specified by LIMIT */ + private int totalLimit; - public double[] backOffRetryIntervals() { - return backOffRetryIntervals; - } + /** Number of rows fetched 
from each table specified by JOIN_TABLES_LIMIT hint */ + private int tableLimit1; - public void configureBackOffRetryIntervals(Object[] params) { - backOffRetryIntervals = new double[params.length]; - for (int i = 0; i < params.length; i++) { - backOffRetryIntervals[i] = (Integer) params[i]; //Only support integer interval for now - } - } + private int tableLimit2; - public void configureLimit(Integer totalLimit, Integer tableLimit1, Integer tableLimit2) { - if (totalLimit != null) { - this.totalLimit = totalLimit; - } - if (tableLimit1 != null) { - this.tableLimit1 = tableLimit1; - } - if (tableLimit2 != null) { - this.tableLimit2 = tableLimit2; - } - } + /** Push down column values in ON of first table to query against second table */ + private boolean isUseTermsFilterOptimization = false; - public int totalLimit() { - return totalLimit; - } + /** Total time out (seconds) for the execution */ + private int timeout = DEFAULT_TIME_OUT; - public int tableLimit1() { - return tableLimit1; + public BlockSize blockSize() { + return blockSize; + } + + public void configureBlockSize(Object[] params) { + if (params.length > 0) { + Integer size = (Integer) params[0]; + if (size > 0) { + blockSize = new FixedBlockSize(size); + } else { + blockSize = new AdaptiveBlockSize(0); + } } + } + + public Integer[] scrollPageSize() { + return scrollPageSizes; + } - public int tableLimit2() { - return tableLimit2; + public void configureScrollPageSize(Object[] params) { + if (params.length == 1) { + scrollPageSizes = new Integer[] {(Integer) params[0], (Integer) params[0]}; + } else if (params.length >= 2) { + scrollPageSizes = (Integer[]) params; } + } + + public int circuitBreakLimit() { + return circuitBreakLimit; + } - public void configureTermsFilterOptimization(boolean isUseTermFiltersOptimization) { - this.isUseTermsFilterOptimization = isUseTermFiltersOptimization; + public void configureCircuitBreakLimit(Object[] params) { + if (params.length > 0) { + circuitBreakLimit = 
(Integer) params[0]; } + } - public boolean isUseTermsFilterOptimization() { - return isUseTermsFilterOptimization; + public double[] backOffRetryIntervals() { + return backOffRetryIntervals; + } + + public void configureBackOffRetryIntervals(Object[] params) { + backOffRetryIntervals = new double[params.length]; + for (int i = 0; i < params.length; i++) { + backOffRetryIntervals[i] = (Integer) params[i]; // Only support integer interval for now } + } - public void configureTimeOut(Object[] params) { - if (params.length > 0) { - timeout = (Integer) params[0]; - } + public void configureLimit(Integer totalLimit, Integer tableLimit1, Integer tableLimit2) { + if (totalLimit != null) { + this.totalLimit = totalLimit; + } + if (tableLimit1 != null) { + this.tableLimit1 = tableLimit1; + } + if (tableLimit2 != null) { + this.tableLimit2 = tableLimit2; } + } - public int timeout() { - return timeout; + public int totalLimit() { + return totalLimit; + } + + public int tableLimit1() { + return tableLimit1; + } + + public int tableLimit2() { + return tableLimit2; + } + + public void configureTermsFilterOptimization(boolean isUseTermFiltersOptimization) { + this.isUseTermsFilterOptimization = isUseTermFiltersOptimization; + } + + public boolean isUseTermsFilterOptimization() { + return isUseTermsFilterOptimization; + } + + public void configureTimeOut(Object[] params) { + if (params.length > 0) { + timeout = (Integer) params[0]; } + } + + public int timeout() { + return timeout; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java index efaf7057b6..86f155d626 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java @@ -3,22 +3,20 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy.query.planner.physical.estimation; public class Cost implements Comparable { - public static final Cost INFINITY = new Cost(); + public static final Cost INFINITY = new Cost(); - private long inputSize; + private long inputSize; - private long time; + private long time; - public Cost() { - } + public Cost() {} - @Override - public int compareTo(Cost o) { - return 0; - } + @Override + public int compareTo(Cost o) { + return 0; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java index 3b4eb2b48e..19ee573652 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node; import static org.opensearch.sql.legacy.query.planner.core.ExecuteParams.ExecuteParamType.RESOURCE_MANAGER; @@ -19,78 +18,74 @@ import org.opensearch.sql.legacy.query.planner.resource.ResourceManager; /** - * Abstraction for physical operators that load large volume of data and generally prefetch for efficiency. + * Abstraction for physical operators that load large volume of data and generally prefetch for + * efficiency. 
* * @param */ public abstract class BatchPhysicalOperator implements PhysicalOperator { - protected static final Logger LOG = LogManager.getLogger(); + protected static final Logger LOG = LogManager.getLogger(); - /** - * Resource monitor to avoid consuming too much resource - */ - private ResourceManager resourceMgr; + /** Resource monitor to avoid consuming too much resource */ + private ResourceManager resourceMgr; - /** - * Current batch of data - */ - private Iterator> curBatch; + /** Current batch of data */ + private Iterator> curBatch; - @Override - public void open(ExecuteParams params) throws Exception { - //PhysicalOperator.super.open(params); // Child needs to call this super.open() and open its next node too - resourceMgr = params.get(RESOURCE_MANAGER); - } + @Override + public void open(ExecuteParams params) throws Exception { + // Child needs to call this super.open() and open its next node too + // PhysicalOperator.super.open(params); + resourceMgr = params.get(RESOURCE_MANAGER); + } - @Override - public boolean hasNext() { - if (isNoMoreDataInCurrentBatch()) { - LOG.debug("{} No more data in current batch, pre-fetching next batch", this); - Collection> nextBatch = prefetchSafely(); + @Override + public boolean hasNext() { + if (isNoMoreDataInCurrentBatch()) { + LOG.debug("{} No more data in current batch, pre-fetching next batch", this); + Collection> nextBatch = prefetchSafely(); - LOG.debug("{} Pre-fetched {} rows", this, nextBatch.size()); - if (LOG.isTraceEnabled()) { - nextBatch.forEach(row -> LOG.trace("Row pre-fetched: {}", row)); - } + LOG.debug("{} Pre-fetched {} rows", this, nextBatch.size()); + if (LOG.isTraceEnabled()) { + nextBatch.forEach(row -> LOG.trace("Row pre-fetched: {}", row)); + } - curBatch = nextBatch.iterator(); - } - return curBatch.hasNext(); + curBatch = nextBatch.iterator(); } - - @Override - public Row next() { - return curBatch.next(); - } - - /** - * Prefetch next batch safely by checking resource monitor - */ - 
private Collection> prefetchSafely() { - Objects.requireNonNull(resourceMgr, "ResourceManager is not set so unable to do sanity check"); - - boolean isHealthy = resourceMgr.isHealthy(); - boolean isTimeout = resourceMgr.isTimeout(); - if (isHealthy && !isTimeout) { - try { - return prefetch(); - } catch (Exception e) { - throw new IllegalStateException("Failed to prefetch next batch", e); - } - } - throw new IllegalStateException("Exit due to " + (isHealthy ? "time out" : "insufficient resource")); + return curBatch.hasNext(); + } + + @Override + public Row next() { + return curBatch.next(); + } + + /** Prefetch next batch safely by checking resource monitor */ + private Collection> prefetchSafely() { + Objects.requireNonNull(resourceMgr, "ResourceManager is not set so unable to do sanity check"); + + boolean isHealthy = resourceMgr.isHealthy(); + boolean isTimeout = resourceMgr.isTimeout(); + if (isHealthy && !isTimeout) { + try { + return prefetch(); + } catch (Exception e) { + throw new IllegalStateException("Failed to prefetch next batch", e); + } } - - /** - * Prefetch next batch if current is exhausted. - * - * @return next batch - */ - protected abstract Collection> prefetch() throws Exception; - - private boolean isNoMoreDataInCurrentBatch() { - return curBatch == null || !curBatch.hasNext(); - } - + throw new IllegalStateException( + "Exit due to " + (isHealthy ? "time out" : "insufficient resource")); + } + + /** + * Prefetch next batch if current is exhausted. 
+ * + * @return next batch + */ + protected abstract Collection> prefetch() throws Exception; + + private boolean isNoMoreDataInCurrentBatch() { + return curBatch == null || !curBatch.hasNext(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java index 19c0ae41d2..90bf9923d3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -25,91 +24,87 @@ import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize; -/** - * Block-based Hash Join implementation - */ +/** Block-based Hash Join implementation */ public class BlockHashJoin extends JoinAlgorithm { - /** - * Use terms filter optimization or not - */ - private final boolean isUseTermsFilterOptimization; + /** Use terms filter optimization or not */ + private final boolean isUseTermsFilterOptimization; - public BlockHashJoin(PhysicalOperator left, - PhysicalOperator right, - JoinType type, - JoinCondition condition, - BlockSize blockSize, - boolean isUseTermsFilterOptimization) { - super(left, right, type, condition, blockSize); + public BlockHashJoin( + PhysicalOperator left, + PhysicalOperator right, + JoinType type, + JoinCondition condition, + BlockSize blockSize, + boolean isUseTermsFilterOptimization) { + super(left, right, type, condition, blockSize); - this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; - } + this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; + } - 
@Override - public Cost estimate() { - return new Cost(); - } + @Override + public Cost estimate() { + return new Cost(); + } - @Override - protected void reopenRight() throws Exception { - Objects.requireNonNull(params, "Execute params is not set so unable to add extra filter"); + @Override + protected void reopenRight() throws Exception { + Objects.requireNonNull(params, "Execute params is not set so unable to add extra filter"); - if (isUseTermsFilterOptimization) { - params.add(ExecuteParams.ExecuteParamType.EXTRA_QUERY_FILTER, queryForPushedDownOnConds()); - } - right.open(params); + if (isUseTermsFilterOptimization) { + params.add(ExecuteParams.ExecuteParamType.EXTRA_QUERY_FILTER, queryForPushedDownOnConds()); } - - @Override - protected List> probe() { - List> combinedRows = new ArrayList<>(); - int totalSize = 0; - - /* Return if already found enough matched rows to give ResourceMgr a chance to check resource usage */ - while (right.hasNext() && totalSize < hashTable.size()) { - Row rightRow = right.next(); - Collection> matchedLeftRows = hashTable.match(rightRow); - - if (!matchedLeftRows.isEmpty()) { - combinedRows.add(new CombinedRow<>(rightRow, matchedLeftRows)); - totalSize += matchedLeftRows.size(); - } - } - return combinedRows; + right.open(params); + } + + @Override + protected List> probe() { + List> combinedRows = new ArrayList<>(); + int totalSize = 0; + + /* Return if already found enough matched rows to give ResourceMgr a chance to check resource usage */ + while (right.hasNext() && totalSize < hashTable.size()) { + Row rightRow = right.next(); + Collection> matchedLeftRows = hashTable.match(rightRow); + + if (!matchedLeftRows.isEmpty()) { + combinedRows.add(new CombinedRow<>(rightRow, matchedLeftRows)); + totalSize += matchedLeftRows.size(); + } } - - /** - * Build query for pushed down conditions in ON - */ - private BoolQueryBuilder queryForPushedDownOnConds() { - BoolQueryBuilder orQuery = boolQuery(); - Map>[] rightNameToLeftValuesGroup = 
hashTable.rightFieldWithLeftValues(); - - for (Map> rightNameToLeftValues : rightNameToLeftValuesGroup) { - if (LOG.isTraceEnabled()) { - rightNameToLeftValues.forEach((rightName, leftValues) -> - LOG.trace("Right name to left values mapping: {} => {}", rightName, leftValues)); - } - - BoolQueryBuilder andQuery = boolQuery(); - rightNameToLeftValues.forEach( - (rightName, leftValues) -> andQuery.must(termsQuery(rightName, leftValues)) - ); - - if (LOG.isTraceEnabled()) { - LOG.trace("Terms filter optimization: {}", Strings.toString(XContentType.JSON, andQuery)); - } - orQuery.should(andQuery); - } - return orQuery; + return combinedRows; + } + + /** Build query for pushed down conditions in ON */ + private BoolQueryBuilder queryForPushedDownOnConds() { + BoolQueryBuilder orQuery = boolQuery(); + Map>[] rightNameToLeftValuesGroup = + hashTable.rightFieldWithLeftValues(); + + for (Map> rightNameToLeftValues : rightNameToLeftValuesGroup) { + if (LOG.isTraceEnabled()) { + rightNameToLeftValues.forEach( + (rightName, leftValues) -> + LOG.trace("Right name to left values mapping: {} => {}", rightName, leftValues)); + } + + BoolQueryBuilder andQuery = boolQuery(); + rightNameToLeftValues.forEach( + (rightName, leftValues) -> andQuery.must(termsQuery(rightName, leftValues))); + + if (LOG.isTraceEnabled()) { + LOG.trace("Terms filter optimization: {}", Strings.toString(XContentType.JSON, andQuery)); + } + orQuery.should(andQuery); } + return orQuery; + } - /********************************************* - * Getters for Explain - *********************************************/ + /********************************************* + * Getters for Explain + *********************************************/ - public boolean isUseTermsFilterOptimization() { - return isUseTermsFilterOptimization; - } + public boolean isUseTermsFilterOptimization() { + return isUseTermsFilterOptimization; + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java index e83bbb7d0e..b1fb43441e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import java.util.ArrayList; @@ -19,28 +18,28 @@ */ public class CombinedRow { - private Row rightRow; - private Collection> leftRows; + private Row rightRow; + private Collection> leftRows; - public CombinedRow(Row rightRow, Collection> leftRows) { - this.rightRow = rightRow; - this.leftRows = leftRows; - } + public CombinedRow(Row rightRow, Collection> leftRows) { + this.rightRow = rightRow; + this.leftRows = leftRows; + } - public List> combine() { - List> combinedRows = new ArrayList<>(); - for (Row leftRow : leftRows) { - combinedRows.add(leftRow.combine(rightRow)); - } - return combinedRows; + public List> combine() { + List> combinedRows = new ArrayList<>(); + for (Row leftRow : leftRows) { + combinedRows.add(leftRow.combine(rightRow)); } + return combinedRows; + } - public Collection> leftMatchedRows() { - return Collections.unmodifiableCollection(leftRows); - } + public Collection> leftMatchedRows() { + return Collections.unmodifiableCollection(leftRows); + } - @Override - public String toString() { - return "CombinedRow{rightRow=" + rightRow + ", leftRows=" + leftRows + '}'; - } + @Override + public String toString() { + return "CombinedRow{rightRow=" + rightRow + ", leftRows=" + leftRows + '}'; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java index 733d7a78ab..23e79d2c31 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static java.util.Collections.emptyList; @@ -22,102 +21,98 @@ import org.opensearch.sql.legacy.query.planner.physical.Row.RowKey; /** - * Hash table implementation. - * In the case of no join condition, hash table degrades to linked list with all rows in block paired to RowKey.NULL + * Hash table implementation. In the case of no join condition, hash table degrades to linked list + * with all rows in block paired to RowKey.NULL * * @param Row data type */ public class DefaultHashTable implements HashTable { - private static final Logger LOG = LogManager.getLogger(); - - /** - * Hash table implementation - */ - private final Multimap> table = ArrayListMultimap.create(); - - /** - * Left join conditions to generate key to build hash table by left rows from block - */ - private final String[] leftJoinFields; - - /** - * Right join conditions to generate key to probe hash table by right rows - */ - private final String[] rightJoinFields; - - - public DefaultHashTable(String[] leftJoinFields, String[] rightJoinFields) { - this.leftJoinFields = leftJoinFields; - this.rightJoinFields = rightJoinFields; + private static final Logger LOG = LogManager.getLogger(); + + /** Hash table implementation */ + private final Multimap> table = ArrayListMultimap.create(); + + /** Left join conditions to generate key to build hash table by left rows from block */ + private final String[] leftJoinFields; + + /** Right join conditions to generate key to probe hash table by right rows */ + private final 
String[] rightJoinFields; + + public DefaultHashTable(String[] leftJoinFields, String[] rightJoinFields) { + this.leftJoinFields = leftJoinFields; + this.rightJoinFields = rightJoinFields; + } + + /** + * Add row in block to hash table by left conditions in ON. For the duplicate key, append them to + * the list in value (MultiMap) + */ + @Override + public void add(Row row) { + RowKey key = row.key(leftJoinFields); + if (key == RowKey.NULL) { + LOG.debug( + "Skip rows with NULL column value during build: row={}, conditions={}", + row, + leftJoinFields); + } else { + table.put(key, row); } - - /** - * Add row in block to hash table by left conditions in ON. - * For the duplicate key, append them to the list in value (MultiMap) - */ - @Override - public void add(Row row) { - RowKey key = row.key(leftJoinFields); - if (key == RowKey.NULL) { - LOG.debug("Skip rows with NULL column value during build: row={}, conditions={}", row, leftJoinFields); - } else { - table.put(key, row); - } + } + + /** Probe hash table to match right rows by values of right conditions */ + @Override + public Collection> match(Row row) { + RowKey key = row.key(rightJoinFields); + if (key == RowKey.NULL) { + LOG.debug( + "Skip rows with NULL column value during probing: row={}, conditions={}", + row, + rightJoinFields); + return emptyList(); } - - /** - * Probe hash table to match right rows by values of right conditions - */ - @Override - public Collection> match(Row row) { - RowKey key = row.key(rightJoinFields); - if (key == RowKey.NULL) { - LOG.debug("Skip rows with NULL column value during probing: row={}, conditions={}", row, rightJoinFields); - return emptyList(); - } - return table.get(key); // Multimap returns empty list rather null. + return table.get(key); // Multimap returns empty list rather null. 
+ } + + /** Right joined field name with according column value list to push down */ + @SuppressWarnings("unchecked") + @Override + public Map>[] rightFieldWithLeftValues() { + Map> result = + new HashMap<>(); // Eliminate potential duplicate in values + for (RowKey key : table.keySet()) { + Object[] keys = key.keys(); + for (int i = 0; i < keys.length; i++) { + result + .computeIfAbsent(rightJoinFields[i], (k -> new HashSet<>())) + .add(lowercaseIfStr(keys[i])); // Terms stored in lower case in OpenSearch + } } - /** - * Right joined field name with according column value list to push down - */ - @SuppressWarnings("unchecked") - @Override - public Map>[] rightFieldWithLeftValues() { - Map> result = new HashMap<>(); // Eliminate potential duplicate in values - for (RowKey key : table.keySet()) { - Object[] keys = key.keys(); - for (int i = 0; i < keys.length; i++) { - result.computeIfAbsent(rightJoinFields[i], (k -> new HashSet<>())). - add(lowercaseIfStr(keys[i])); // Terms stored in lower case in OpenSearch - } - } - - // Convert value of Map from Guava's Set to JDK list which is expected by OpenSearch writer - for (Entry> entry : result.entrySet()) { - entry.setValue(new ArrayList<>(entry.getValue())); - } - return new Map[]{result}; + // Convert value of Map from Guava's Set to JDK list which is expected by OpenSearch writer + for (Entry> entry : result.entrySet()) { + entry.setValue(new ArrayList<>(entry.getValue())); } - - @Override - public int size() { - return table.size(); - } - - @Override - public boolean isEmpty() { - return table.isEmpty(); - } - - @Override - public void clear() { - table.clear(); - } - - private Object lowercaseIfStr(Object key) { - return key instanceof String ? 
((String) key).toLowerCase() : key; - } - + return new Map[] {result}; + } + + @Override + public int size() { + return table.size(); + } + + @Override + public boolean isEmpty() { + return table.isEmpty(); + } + + @Override + public void clear() { + table.clear(); + } + + private Object lowercaseIfStr(Object key) { + return key instanceof String ? ((String) key).toLowerCase() : key; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java index 9e3a190e30..41f500fed1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import java.util.Map; @@ -13,25 +12,25 @@ @RequiredArgsConstructor public class BindingTupleRow implements Row { - private final BindingTuple bindingTuple; - - @Override - public RowKey key(String[] colNames) { - return null; - } - - @Override - public Row combine(Row otherRow) { - throw new RuntimeException("unsupported operation"); - } - - @Override - public void retain(Map colNameAlias) { - // do nothing - } - - @Override - public BindingTuple data() { - return bindingTuple; - } + private final BindingTuple bindingTuple; + + @Override + public RowKey key(String[] colNames) { + return null; + } + + @Override + public Row combine(Row otherRow) { + throw new RuntimeException("unsupported operation"); + } + + @Override + public void retain(Map colNameAlias) { + // do nothing + } + + @Override + public BindingTuple data() { + return bindingTuple; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java index d68b16b8bb..6e5a2703f4 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java @@ -3,42 +3,35 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.blocksize; -/** - * Block size calculating logic. - */ +/** Block size calculating logic. */ public interface BlockSize { - /** - * Get block size configured or dynamically. Integer should be sufficient for single block size. - * - * @return block size. - */ - int size(); - + /** + * Get block size configured or dynamically. Integer should be sufficient for single block size. + * + * @return block size. + */ + int size(); - /** - * Default implementation with fixed block size - */ - class FixedBlockSize implements BlockSize { + /** Default implementation with fixed block size */ + class FixedBlockSize implements BlockSize { - private int blockSize; + private int blockSize; - public FixedBlockSize(int blockSize) { - this.blockSize = blockSize; - } - - @Override - public int size() { - return blockSize; - } + public FixedBlockSize(int blockSize) { + this.blockSize = blockSize; + } - @Override - public String toString() { - return "FixedBlockSize with " + "size=" + blockSize; - } + @Override + public int size() { + return blockSize; } + @Override + public String toString() { + return "FixedBlockSize with size=" + blockSize; + } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java index 2768b269bf..c4f3ee5a10 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.identifier; import com.alibaba.druid.sql.ast.expr.SQLBooleanExpr; @@ -17,53 +16,53 @@ import org.opensearch.sql.legacy.rewriter.RewriteRule; /** - * Rewrite rule to anonymize sensitive data in logging queries. - * This rule replace the content of specific nodes (that might involve index data) in AST - * to anonymous content. + * Rewrite rule to anonymize sensitive data in logging queries. This rule replace the content of + * specific nodes (that might involve index data) in AST to anonymous content. */ -public class AnonymizeSensitiveDataRule extends MySqlASTVisitorAdapter implements RewriteRule { +public class AnonymizeSensitiveDataRule extends MySqlASTVisitorAdapter + implements RewriteRule { - @Override - public boolean visit(SQLIdentifierExpr identifierExpr) { - if (identifierExpr.getParent() instanceof SQLExprTableSource) { - identifierExpr.setName("table"); - } else { - identifierExpr.setName("identifier"); - } - return true; + @Override + public boolean visit(SQLIdentifierExpr identifierExpr) { + if (identifierExpr.getParent() instanceof SQLExprTableSource) { + identifierExpr.setName("table"); + } else { + identifierExpr.setName("identifier"); } + return true; + } - @Override - public boolean visit(SQLIntegerExpr integerExpr) { - integerExpr.setNumber(0); - return true; - } + @Override + public boolean visit(SQLIntegerExpr integerExpr) { + integerExpr.setNumber(0); + return true; + } - @Override - public boolean visit(SQLNumberExpr numberExpr) { - numberExpr.setNumber(0); - return true; - } + @Override + public boolean visit(SQLNumberExpr numberExpr) { + numberExpr.setNumber(0); + return true; + } - @Override - public boolean visit(SQLCharExpr charExpr) { - charExpr.setText("string_literal"); - return true; - } + @Override + public boolean visit(SQLCharExpr 
charExpr) { + charExpr.setText("string_literal"); + return true; + } - @Override - public boolean visit(SQLBooleanExpr booleanExpr) { - booleanExpr.setValue(false); - return true; - } + @Override + public boolean visit(SQLBooleanExpr booleanExpr) { + booleanExpr.setValue(false); + return true; + } - @Override - public boolean match(SQLQueryExpr expr) { - return true; - } + @Override + public boolean match(SQLQueryExpr expr) { + return true; + } - @Override - public void rewrite(SQLQueryExpr expr) { - expr.accept(this); - } + @Override + public void rewrite(SQLQueryExpr expr) { + expr.accept(this); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java index df9f4c88b2..fb62f60ae7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 1/8/2015. - */ +/** Created by Eliran on 1/8/2015. 
*/ public class BoundingBoxFilterParams { - private Point topLeft; - private Point bottomRight; + private Point topLeft; + private Point bottomRight; - public BoundingBoxFilterParams(Point topLeft, Point bottomRight) { - this.topLeft = topLeft; - this.bottomRight = bottomRight; - } + public BoundingBoxFilterParams(Point topLeft, Point bottomRight) { + this.topLeft = topLeft; + this.bottomRight = bottomRight; + } - public Point getTopLeft() { - return topLeft; - } + public Point getTopLeft() { + return topLeft; + } - public Point getBottomRight() { - return bottomRight; - } + public Point getBottomRight() { + return bottomRight; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java index fc3dc35f07..6c50c17467 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java @@ -3,36 +3,33 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 15/8/2015. - */ +/** Created by Eliran on 15/8/2015. 
*/ public class CellFilterParams { - private Point geohashPoint; - private int precision; - private boolean neighbors; - - public CellFilterParams(Point geohashPoint, int precision, boolean neighbors) { - this.geohashPoint = geohashPoint; - this.precision = precision; - this.neighbors = neighbors; - } - - public CellFilterParams(Point geohashPoint, int precision) { - this(geohashPoint, precision, false); - } - - public Point getGeohashPoint() { - return geohashPoint; - } - - public int getPrecision() { - return precision; - } - - public boolean isNeighbors() { - return neighbors; - } + private Point geohashPoint; + private int precision; + private boolean neighbors; + + public CellFilterParams(Point geohashPoint, int precision, boolean neighbors) { + this.geohashPoint = geohashPoint; + this.precision = precision; + this.neighbors = neighbors; + } + + public CellFilterParams(Point geohashPoint, int precision) { + this(geohashPoint, precision, false); + } + + public Point getGeohashPoint() { + return geohashPoint; + } + + public int getPrecision() { + return precision; + } + + public boolean isNeighbors() { + return neighbors; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java index 1141da08ca..8c419de58d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 1/8/2015. - */ +/** Created by Eliran on 1/8/2015. 
*/ public class DistanceFilterParams { - private String distance; - private Point from; + private String distance; + private Point from; - public DistanceFilterParams(String distance, Point from) { - this.distance = distance; - this.from = from; - } + public DistanceFilterParams(String distance, Point from) { + this.distance = distance; + this.from = from; + } - public String getDistance() { - return distance; - } + public String getDistance() { + return distance; + } - public Point getFrom() { - return from; - } + public Point getFrom() { + return from; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java index a8ddfd43e8..0269c6b01c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static org.junit.Assert.assertEquals; @@ -30,78 +29,75 @@ import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Test base type compatibility - */ +/** Test base type compatibility */ public class BaseTypeTest { - @Test - public void unknownTypeNameShouldReturnUnknown() { - assertEquals(UNKNOWN, OpenSearchDataType.typeOf("this_is_a_new_es_type_we_arent_aware")); - } - - @Test - public void typeOfShouldIgnoreCase() { - assertEquals(INTEGER, OpenSearchDataType.typeOf("Integer")); - } - - @Test - public void sameBaseTypeShouldBeCompatible() { - assertTrue(INTEGER.isCompatible(INTEGER)); - assertTrue(BOOLEAN.isCompatible(BOOLEAN)); - } - - @Test - public void parentBaseTypeShouldBeCompatibleWithSubBaseType() { - assertTrue(NUMBER.isCompatible(DOUBLE)); - 
assertTrue(DOUBLE.isCompatible(FLOAT)); - assertTrue(FLOAT.isCompatible(INTEGER)); - assertTrue(INTEGER.isCompatible(SHORT)); - assertTrue(INTEGER.isCompatible(LONG)); - assertTrue(STRING.isCompatible(TEXT)); - assertTrue(STRING.isCompatible(KEYWORD)); - assertTrue(DATE.isCompatible(STRING)); - } - - @Test - public void ancestorBaseTypeShouldBeCompatibleWithSubBaseType() { - assertTrue(NUMBER.isCompatible(LONG)); - assertTrue(NUMBER.isCompatible(DOUBLE)); - assertTrue(DOUBLE.isCompatible(INTEGER)); - assertTrue(INTEGER.isCompatible(SHORT)); - assertTrue(INTEGER.isCompatible(LONG)); - } - - @Ignore("Two way compatibility is not necessary") - @Test - public void subBaseTypeShouldBeCompatibleWithParentBaseType() { - assertTrue(KEYWORD.isCompatible(STRING)); - } - - @Test - public void nonRelatedBaseTypeShouldNotBeCompatible() { - assertFalse(SHORT.isCompatible(TEXT)); - assertFalse(DATE.isCompatible(BOOLEAN)); - } - - @Test - public void unknownBaseTypeShouldBeCompatibleWithAnyBaseType() { - assertTrue(UNKNOWN.isCompatible(INTEGER)); - assertTrue(UNKNOWN.isCompatible(KEYWORD)); - assertTrue(UNKNOWN.isCompatible(BOOLEAN)); - } - - @Test - public void anyBaseTypeShouldBeCompatibleWithUnknownBaseType() { - assertTrue(LONG.isCompatible(UNKNOWN)); - assertTrue(TEXT.isCompatible(UNKNOWN)); - assertTrue(DATE.isCompatible(UNKNOWN)); - } - - @Test - public void nestedIndexTypeShouldBeCompatibleWithNestedDataType() { - assertTrue(NESTED.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); - assertTrue(OPENSEARCH_TYPE.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); - } - + @Test + public void unknownTypeNameShouldReturnUnknown() { + assertEquals(UNKNOWN, OpenSearchDataType.typeOf("this_is_a_new_es_type_we_arent_aware")); + } + + @Test + public void typeOfShouldIgnoreCase() { + assertEquals(INTEGER, OpenSearchDataType.typeOf("Integer")); + } + + @Test + public void sameBaseTypeShouldBeCompatible() { + assertTrue(INTEGER.isCompatible(INTEGER)); + 
assertTrue(BOOLEAN.isCompatible(BOOLEAN)); + } + + @Test + public void parentBaseTypeShouldBeCompatibleWithSubBaseType() { + assertTrue(NUMBER.isCompatible(DOUBLE)); + assertTrue(DOUBLE.isCompatible(FLOAT)); + assertTrue(FLOAT.isCompatible(INTEGER)); + assertTrue(INTEGER.isCompatible(SHORT)); + assertTrue(INTEGER.isCompatible(LONG)); + assertTrue(STRING.isCompatible(TEXT)); + assertTrue(STRING.isCompatible(KEYWORD)); + assertTrue(DATE.isCompatible(STRING)); + } + + @Test + public void ancestorBaseTypeShouldBeCompatibleWithSubBaseType() { + assertTrue(NUMBER.isCompatible(LONG)); + assertTrue(NUMBER.isCompatible(DOUBLE)); + assertTrue(DOUBLE.isCompatible(INTEGER)); + assertTrue(INTEGER.isCompatible(SHORT)); + assertTrue(INTEGER.isCompatible(LONG)); + } + + @Ignore("Two way compatibility is not necessary") + @Test + public void subBaseTypeShouldBeCompatibleWithParentBaseType() { + assertTrue(KEYWORD.isCompatible(STRING)); + } + + @Test + public void nonRelatedBaseTypeShouldNotBeCompatible() { + assertFalse(SHORT.isCompatible(TEXT)); + assertFalse(DATE.isCompatible(BOOLEAN)); + } + + @Test + public void unknownBaseTypeShouldBeCompatibleWithAnyBaseType() { + assertTrue(UNKNOWN.isCompatible(INTEGER)); + assertTrue(UNKNOWN.isCompatible(KEYWORD)); + assertTrue(UNKNOWN.isCompatible(BOOLEAN)); + } + + @Test + public void anyBaseTypeShouldBeCompatibleWithUnknownBaseType() { + assertTrue(LONG.isCompatible(UNKNOWN)); + assertTrue(TEXT.isCompatible(UNKNOWN)); + assertTrue(DATE.isCompatible(UNKNOWN)); + } + + @Test + public void nestedIndexTypeShouldBeCompatibleWithNestedDataType() { + assertTrue(NESTED.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); + assertTrue(OPENSEARCH_TYPE.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java index 
c4e7a7e725..be4b5a5197 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import static java.util.Collections.emptyList; @@ -25,95 +24,97 @@ import org.opensearch.sql.legacy.antlr.semantic.visitor.TypeChecker; import org.opensearch.sql.legacy.exception.SqlFeatureNotImplementedException; -/** - * Test cases for AntlrSqlParseTreeVisitor - */ +/** Test cases for AntlrSqlParseTreeVisitor */ public class AntlrSqlParseTreeVisitorTest { - private TypeChecker analyzer = new TypeChecker(new SemanticContext()) { + private TypeChecker analyzer = + new TypeChecker(new SemanticContext()) { @Override public Type visitIndexName(String indexName) { - return null; // avoid querying mapping on null LocalClusterState + return null; // avoid querying mapping on null LocalClusterState } @Override public Type visitFieldName(String fieldName) { - switch (fieldName) { - case "age": return INTEGER; - case "birthday": return DATE; - default: return UNKNOWN; - } + switch (fieldName) { + case "age": + return INTEGER; + case "birthday": + return DATE; + default: + return UNKNOWN; + } } - }; - - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void selectNumberShouldReturnNumberAsQueryVisitingResult() { - Type result = visit("SELECT age FROM test"); - Assert.assertSame(result, INTEGER); - } - - @Test - public void selectNumberAndDateShouldReturnProductOfThemAsQueryVisitingResult() { - Type result = visit("SELECT age, birthday FROM test"); - Assert.assertTrue(result instanceof Product ); - Assert.assertTrue(result.isCompatible(new Product(Arrays.asList(INTEGER, DATE)))); - } - - @Test - public void selectStarShouldReturnEmptyProductAsQueryVisitingResult() { - Type result = visit("SELECT * 
FROM test"); - Assert.assertTrue(result instanceof Product); - Assert.assertTrue(result.isCompatible(new Product(emptyList()))); - } - - @Test - public void visitSelectNestedFunctionShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); - visit("SELECT abs(log(age)) FROM test"); - } - - @Test - public void visitWhereNestedFunctionShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); - visit("SELECT age FROM test WHERE abs(log(age)) = 1"); - } - - @Test - public void visitMathConstantAsNestedFunctionShouldPass() { - visit("SELECT abs(pi()) FROM test"); - } - - @Test - public void visitSupportedNestedFunctionShouldPass() { - visit("SELECT sum(nested(name.balance)) FROM test"); - } - - @Test - public void visitFunctionAsAggregatorShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Aggregation calls with function aggregator like [max(abs(age))] are not supported yet"); - visit("SELECT max(abs(age)) FROM test"); - } - - @Test - public void visitUnsupportedOperatorShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Operator [DIV] is not supported yet"); - visit("SELECT balance DIV age FROM test"); - } - - private ParseTree createParseTree(String sql) { - return new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)).analyzeSyntax(sql); - } - - private Type visit(String sql) { - ParseTree parseTree = createParseTree(sql); - return parseTree.accept(new AntlrSqlParseTreeVisitor<>(analyzer)); - } - + }; + + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void selectNumberShouldReturnNumberAsQueryVisitingResult() { + 
Type result = visit("SELECT age FROM test"); + Assert.assertSame(result, INTEGER); + } + + @Test + public void selectNumberAndDateShouldReturnProductOfThemAsQueryVisitingResult() { + Type result = visit("SELECT age, birthday FROM test"); + Assert.assertTrue(result instanceof Product); + Assert.assertTrue(result.isCompatible(new Product(Arrays.asList(INTEGER, DATE)))); + } + + @Test + public void selectStarShouldReturnEmptyProductAsQueryVisitingResult() { + Type result = visit("SELECT * FROM test"); + Assert.assertTrue(result instanceof Product); + Assert.assertTrue(result.isCompatible(new Product(emptyList()))); + } + + @Test + public void visitSelectNestedFunctionShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); + visit("SELECT abs(log(age)) FROM test"); + } + + @Test + public void visitWhereNestedFunctionShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); + visit("SELECT age FROM test WHERE abs(log(age)) = 1"); + } + + @Test + public void visitMathConstantAsNestedFunctionShouldPass() { + visit("SELECT abs(pi()) FROM test"); + } + + @Test + public void visitSupportedNestedFunctionShouldPass() { + visit("SELECT sum(nested(name.balance)) FROM test"); + } + + @Test + public void visitFunctionAsAggregatorShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage( + "Aggregation calls with function aggregator like [max(abs(age))] are not supported yet"); + visit("SELECT max(abs(age)) FROM test"); + } + + @Test + public void visitUnsupportedOperatorShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage("Operator [DIV] is not supported yet"); + visit("SELECT balance DIV age 
FROM test"); + } + + private ParseTree createParseTree(String sql) { + return new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)) + .analyzeSyntax(sql); + } + + private Type visit(String sql) { + ParseTree parseTree = createParseTree(sql); + return parseTree.accept(new AntlrSqlParseTreeVisitor<>(analyzer)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java index b26e171ce7..9be2517c4a 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import static java.util.Collections.emptyList; @@ -34,77 +33,69 @@ import org.opensearch.sql.opensearch.setting.OpenSearchSettings; import org.opensearch.threadpool.ThreadPool; -/** - * Test AsyncRestExecutor behavior. - */ +/** Test AsyncRestExecutor behavior. 
*/ @RunWith(MockitoJUnitRunner.Silent.class) public class AsyncRestExecutorTest { - private static final boolean NON_BLOCKING = false; - - @Mock - private RestExecutor executor; + private static final boolean NON_BLOCKING = false; - @Mock - private Client client; + @Mock private RestExecutor executor; - private Map params = emptyMap(); + @Mock private Client client; - @Mock - private QueryAction action; + private Map params = emptyMap(); - @Mock - private RestChannel channel; + @Mock private QueryAction action; - @Mock - private ClusterSettings clusterSettings; + @Mock private RestChannel channel; - @Before - public void setUp() { - when(client.threadPool()).thenReturn(mock(ThreadPool.class)); - when(action.getSqlRequest()).thenReturn(SqlRequest.NULL); - when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); + @Mock private ClusterSettings clusterSettings; - OpenSearchSettings settings = spy(new OpenSearchSettings(clusterSettings)); - doReturn(emptyList()).when(settings).getSettings(); - LocalClusterState.state().setPluginSettings(settings); - } + @Before + public void setUp() { + when(client.threadPool()).thenReturn(mock(ThreadPool.class)); + when(action.getSqlRequest()).thenReturn(SqlRequest.NULL); + when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - @Test - public void executeBlockingQuery() throws Exception { - Thread.currentThread().setName(TRANSPORT_WORKER_THREAD_NAME_PREFIX); - execute(); - verifyRunInWorkerThread(); - } + OpenSearchSettings settings = spy(new OpenSearchSettings(clusterSettings)); + doReturn(emptyList()).when(settings).getSettings(); + LocalClusterState.state().setPluginSettings(settings); + } - @Test - public void executeBlockingQueryButNotInTransport() throws Exception { - execute(); - verifyRunInCurrentThread(); - } + @Test + public void executeBlockingQuery() throws Exception { + Thread.currentThread().setName(TRANSPORT_WORKER_THREAD_NAME_PREFIX); + 
execute(); + verifyRunInWorkerThread(); + } - @Test - public void executeNonBlockingQuery() throws Exception { - execute(anyAction -> NON_BLOCKING); - verifyRunInCurrentThread(); - } + @Test + public void executeBlockingQueryButNotInTransport() throws Exception { + execute(); + verifyRunInCurrentThread(); + } - private void execute() throws Exception { - AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor); - asyncExecutor.execute(client, params, action, channel); - } + @Test + public void executeNonBlockingQuery() throws Exception { + execute(anyAction -> NON_BLOCKING); + verifyRunInCurrentThread(); + } - private void execute(Predicate isBlocking) throws Exception { - AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor, isBlocking); - asyncExecutor.execute(client, params, action, channel); - } + private void execute() throws Exception { + AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor); + asyncExecutor.execute(client, params, action, channel); + } - private void verifyRunInCurrentThread() { - verify(client, never()).threadPool(); - } + private void execute(Predicate isBlocking) throws Exception { + AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor, isBlocking); + asyncExecutor.execute(client, params, action, channel); + } - private void verifyRunInWorkerThread() { - verify(client, times(1)).threadPool(); - } + private void verifyRunInCurrentThread() { + verify(client, never()).threadPool(); + } + private void verifyRunInWorkerThread() { + verify(client, times(1)).threadPool(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java index 1a24045881..c877095d8f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: 
Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import static org.junit.Assert.assertEquals; @@ -13,25 +12,21 @@ import java.util.stream.Collectors; import org.junit.Test; -/** - * Unit tests for {@link CSVResult} - */ +/** Unit tests for {@link CSVResult} */ public class CSVResultTest { - private static final String SEPARATOR = ","; + private static final String SEPARATOR = ","; - @Test - public void getHeadersShouldReturnHeadersSanitized() { - CSVResult csv = csv(headers("name", "=age"), lines(line("John", "30"))); - assertEquals( - headers("name", "'=age"), - csv.getHeaders() - ); - } + @Test + public void getHeadersShouldReturnHeadersSanitized() { + CSVResult csv = csv(headers("name", "=age"), lines(line("John", "30"))); + assertEquals(headers("name", "'=age"), csv.getHeaders()); + } - @Test - public void getLinesShouldReturnLinesSanitized() { - CSVResult csv = csv( + @Test + public void getLinesShouldReturnLinesSanitized() { + CSVResult csv = + csv( headers("name", "city"), lines( line("John", "Seattle"), @@ -39,53 +34,42 @@ public void getLinesShouldReturnLinesSanitized() { line("John", "+Seattle"), line("-John", "Seattle"), line("@John", "Seattle"), - line("John", "Seattle=") - ) - ); - - assertEquals( - line( - "John,Seattle", - "John,'=Seattle", - "John,'+Seattle", - "'-John,Seattle", - "'@John,Seattle", - "John,Seattle=" - ), - csv.getLines() - ); - } - - @Test - public void getHeadersShouldReturnHeadersQuotedIfRequired() { - CSVResult csv = csv(headers("na,me", ",,age"), lines(line("John", "30"))); - assertEquals( - headers("\"na,me\"", "\",,age\""), - csv.getHeaders() - ); - } - - @Test - public void getLinesShouldReturnLinesQuotedIfRequired() { - CSVResult csv = csv(headers("name", "age"), lines(line("John,Smith", "30,,,"))); - assertEquals( - line("\"John,Smith\",\"30,,,\""), - csv.getLines() - ); - } - - @Test - public void getHeadersShouldReturnHeadersBothSanitizedAndQuotedIfRequired() { - CSVResult csv = csv(headers("na,+me", 
",,,=age", "=city,"), lines(line("John", "30", "Seattle"))); - assertEquals( - headers("\"na,+me\"", "\",,,=age\"", "\"'=city,\""), - csv.getHeaders() - ); - } - - @Test - public void getLinesShouldReturnLinesBothSanitizedAndQuotedIfRequired() { - CSVResult csv = csv( + line("John", "Seattle="))); + + assertEquals( + line( + "John,Seattle", + "John,'=Seattle", + "John,'+Seattle", + "'-John,Seattle", + "'@John,Seattle", + "John,Seattle="), + csv.getLines()); + } + + @Test + public void getHeadersShouldReturnHeadersQuotedIfRequired() { + CSVResult csv = csv(headers("na,me", ",,age"), lines(line("John", "30"))); + assertEquals(headers("\"na,me\"", "\",,age\""), csv.getHeaders()); + } + + @Test + public void getLinesShouldReturnLinesQuotedIfRequired() { + CSVResult csv = csv(headers("name", "age"), lines(line("John,Smith", "30,,,"))); + assertEquals(line("\"John,Smith\",\"30,,,\""), csv.getLines()); + } + + @Test + public void getHeadersShouldReturnHeadersBothSanitizedAndQuotedIfRequired() { + CSVResult csv = + csv(headers("na,+me", ",,,=age", "=city,"), lines(line("John", "30", "Seattle"))); + assertEquals(headers("\"na,+me\"", "\",,,=age\"", "\"'=city,\""), csv.getHeaders()); + } + + @Test + public void getLinesShouldReturnLinesBothSanitizedAndQuotedIfRequired() { + CSVResult csv = + csv( headers("name", "city"), lines( line("John", "Seattle"), @@ -93,38 +77,33 @@ public void getLinesShouldReturnLinesBothSanitizedAndQuotedIfRequired() { line("John", "+Sea,ttle"), line(",-John", "Seattle"), line(",,,@John", "Seattle"), - line("John", "Seattle=") - ) - ); - - assertEquals( - line( - "John,Seattle", - "John,'=Seattle", - "John,\"'+Sea,ttle\"", - "\",-John\",Seattle", - "\",,,@John\",Seattle", - "John,Seattle=" - ), - csv.getLines() - ); - } - - private CSVResult csv(List headers, List> lines) { - return new CSVResult(SEPARATOR, headers, lines); - } - - private List headers(String... 
headers) { - return Arrays.stream(headers).collect(Collectors.toList()); - } - - private List line(String... line) { - return Arrays.stream(line).collect(Collectors.toList()); - } - - @SafeVarargs - private final List> lines(List... lines) { - return Arrays.stream(lines).collect(Collectors.toList()); - } - + line("John", "Seattle="))); + + assertEquals( + line( + "John,Seattle", + "John,'=Seattle", + "John,\"'+Sea,ttle\"", + "\",-John\",Seattle", + "\",,,@John\",Seattle", + "John,Seattle="), + csv.getLines()); + } + + private CSVResult csv(List headers, List> lines) { + return new CSVResult(SEPARATOR, headers, lines); + } + + private List headers(String... headers) { + return Arrays.stream(headers).collect(Collectors.toList()); + } + + private List line(String... line) { + return Arrays.stream(line).collect(Collectors.toList()); + } + + @SafeVarargs + private final List> lines(List... lines) { + return Arrays.stream(lines).collect(Collectors.toList()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java index 5807ee2c44..1c2d1bae62 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static org.junit.Assert.assertEquals; @@ -18,685 +17,634 @@ public class DateFieldFormatterTest { - @Test - public void testOpenSearchDashboardsSampleDataEcommerceOrderDateField() - { - String columnName = "order_date"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-24T09:28:48+00:00"; - String expectedDateValue = "2020-02-24 09:28:48.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - 
public void testOpenSearchDashboardsSampleDataFlightsTimestampField() - { - String columnName = "timestamp"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-03T00:00:00"; - String expectedDateValue = "2020-02-03 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOpenSearchDashboardsSampleDataFlightsTimestampFieldNoTime() - { - String columnName = "timestamp"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-03T"; - String expectedDateValue = "2020-02-03 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOpenSearchDashboardsSampleDataLogsUtcDateField() - { - String columnName = "utc_date"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-02T00:39:02.912Z"; - String expectedDateValue = "2020-02-02 00:39:02.912"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testEpochMillis() - { - String columnName = "date_field"; - String dateFormat = "epoch_millis"; - String originalDateValue = "727430805000"; - String expectedDateValue = "1993-01-19 08:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testEpochSecond() - { - String columnName = "date_field"; - String dateFormat = "epoch_second"; - String originalDateValue = "727430805"; - String expectedDateValue = "1993-01-19 08:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateOptionalTimeDateOnly() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, 
expectedDateValue); - } - - @Test - public void testDateOptionalTimeDateAndTime() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = "1993-01-19T00:06:45.123-0800"; - String expectedDateValue = "1993-01-19 08:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicDate() - { - String columnName = "date_field"; - String dateFormat = "basic_date"; - String originalDateValue = "19930119"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicDateTime() - { - String columnName = "date_field"; - String dateFormat = "basic_date_time"; - String originalDateValue = "19930119T120645.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_date_time_no_millis"; - String originalDateValue = "19930119T120645-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicOrdinalDate() - { - String columnName = "date_field"; - String dateFormat = "basic_ordinal_date"; - String originalDateValue = "1993019"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicOrdinalDateTime() - { - String columnName = "date_field"; - String dateFormat = "basic_ordinal_date_time"; - String originalDateValue = "1993019T120645.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - 
@Test - public void testBasicOrdinalDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_ordinal_date_time_no_millis"; - String originalDateValue = "1993019T120645-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTime() - { - String columnName = "date_field"; - String dateFormat = "basic_time"; - String originalDateValue = "120645.123-0800"; - String expectedDateValue = "1970-01-01 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_time_no_millis"; - String originalDateValue = "120645-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTTime() - { - String columnName = "date_field"; - String dateFormat = "basic_t_time"; - String originalDateValue = "T120645.123-0800"; - String expectedDateValue = "1970-01-01 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_t_time_no_millis"; - String originalDateValue = "T120645-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicWeekDate() - { - String columnName = "date_field"; - String dateFormat = "basic_week_date"; - String originalDateValue = "1993W042"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicWeekDateTime() - { - String columnName 
= "date_field"; - String dateFormat = "basic_week_date_time"; - String originalDateValue = "1993W042T120645.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicWeekDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_week_date_time_no_millis"; - String originalDateValue = "1993W042T120645-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDate() - { - String columnName = "date_field"; - String dateFormat = "date"; - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHour() - { - String columnName = "date_field"; - String dateFormat = "date_hour"; - String originalDateValue = "1993-01-19T12"; - String expectedDateValue = "1993-01-19 12:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinute() - { - String columnName = "date_field"; - String dateFormat = "date_hour_minute"; - String originalDateValue = "1993-01-19T12:06"; - String expectedDateValue = "1993-01-19 12:06:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinuteSecond() - { - String columnName = "date_field"; - String dateFormat = "date_hour_minute_second"; - String originalDateValue = "1993-01-19T12:06:45"; - String expectedDateValue = "1993-01-19 12:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinuteSecondFraction() - { - String columnName = "date_field"; - String dateFormat = 
"date_hour_minute_second_fraction"; - String originalDateValue = "1993-01-19T12:06:45.123"; - String expectedDateValue = "1993-01-19 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinuteSecondMillis() - { - String columnName = "date_field"; - String dateFormat = "date_hour_minute_second_millis"; - String originalDateValue = "1993-01-19T12:06:45.123"; - String expectedDateValue = "1993-01-19 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateTime() - { - String columnName = "date_field"; - String dateFormat = "date_time"; - String originalDateValue = "1993-01-19T12:06:45.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "date_time_no_millis"; - String originalDateValue = "1993-01-19T12:06:45-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHour() - { - String columnName = "date_field"; - String dateFormat = "hour"; - String originalDateValue = "12"; - String expectedDateValue = "1970-01-01 12:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinute() - { - String columnName = "date_field"; - String dateFormat = "hour_minute"; - String originalDateValue = "12:06"; - String expectedDateValue = "1970-01-01 12:06:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinuteSecond() - { - String columnName = "date_field"; - String dateFormat = "hour_minute_second"; - String originalDateValue = "12:06:45"; - 
String expectedDateValue = "1970-01-01 12:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinuteSecondFraction() - { - String columnName = "date_field"; - String dateFormat = "hour_minute_second_fraction"; - String originalDateValue = "12:06:45.123"; - String expectedDateValue = "1970-01-01 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinuteSecondMillis() - { - String columnName = "date_field"; - String dateFormat = "hour_minute_second_millis"; - String originalDateValue = "12:06:45.123"; - String expectedDateValue = "1970-01-01 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOrdinalDate() - { - String columnName = "date_field"; - String dateFormat = "ordinal_date"; - String originalDateValue = "1993-019"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOrdinalDateTime() - { - String columnName = "date_field"; - String dateFormat = "ordinal_date_time"; - String originalDateValue = "1993-019T12:06:45.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOrdinalDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "ordinal_date_time_no_millis"; - String originalDateValue = "1993-019T12:06:45-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTime() - { - String columnName = "date_field"; - String dateFormat = "time"; - String originalDateValue = "12:06:45.123-0800"; - String expectedDateValue = "1970-01-01 
20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "time_no_millis"; - String originalDateValue = "12:06:45-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTTime() - { - String columnName = "date_field"; - String dateFormat = "t_time"; - String originalDateValue = "T12:06:45.123-0800"; - String expectedDateValue = "1970-01-01 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "t_time_no_millis"; - String originalDateValue = "T12:06:45-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekDate() - { - String columnName = "date_field"; - String dateFormat = "week_date"; - String originalDateValue = "1993-W04-2"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekDateTime() - { - String columnName = "date_field"; - String dateFormat = "week_date_time"; - String originalDateValue = "1993-W04-2T12:06:45.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "week_date_time_no_millis"; - String originalDateValue = "1993-W04-2T12:06:45-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, 
expectedDateValue); - } - - @Test - public void testWeekyear() - { - String columnName = "date_field"; - String dateFormat = "week_year"; - String originalDateValue = "1993"; - String expectedDateValue = "1993-01-01 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekyearWeek() - { - String columnName = "date_field"; - String dateFormat = "weekyear_week"; - String originalDateValue = "1993-W04"; - String expectedDateValue = "1993-01-17 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekyearWeekDay() - { - String columnName = "date_field"; - String dateFormat = "weekyear_week_day"; - String originalDateValue = "1993-W04-2"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testYear() - { - String columnName = "date_field"; - String dateFormat = "year"; - String originalDateValue = "1993"; - String expectedDateValue = "1993-01-01 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testYearMonth() - { - String columnName = "date_field"; - String dateFormat = "year_month"; - String originalDateValue = "1993-01"; - String expectedDateValue = "1993-01-01 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testYearMonthDay() - { - String columnName = "date_field"; - String dateFormat = "year_month_day"; - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testCustomFormat() - { - String columnName = "date_field"; - String dateFormat = "EEE, MMM d, ''yy"; - - String originalDateValue = "Tue, 
Jan 19, '93"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testMultipleFormats() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time||epoch_millis"; - - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "727401600000"; - expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testMultipleCustomFormats() - { - String columnName = "date_field"; - String dateFormat = "EEE, MMM d, ''yy||yyMMddHHmmssZ"; - - String originalDateValue = "Tue, Jan 19, '93"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "930119000000-0000"; - expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testNamedAndCustomFormats() - { - String columnName = "date_field"; - String dateFormat = "EEE, MMM d, ''yy||hour_minute_second"; - - String originalDateValue = "Tue, Jan 19, '93"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "12:06:45"; - expectedDateValue = "1970-01-01 12:06:45.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testIncorrectFormat() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = "1581724085"; - // Invalid format for date value; should return original value - String expectedDateValue = "1581724085"; - - 
verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testNullDateData() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = null; - // Nulls should be preserved - String expectedDateValue = null; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testStrictDateOptionalTimeOrEpochMillsShouldPass() - { - String columnName = "date_field"; - String dateFormat = "strict_date_optional_time||epoch_millis"; - - String originalDateValue = "2015-01-01"; - String expectedDateValue = "2015-01-01 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "2015-01-01T12:10:30Z"; - expectedDateValue = "2015-01-01 12:10:30.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "1420070400001"; - expectedDateValue = "2015-01-01 00:00:00.001"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - private void verifyFormatting(String columnName, String dateFormatProperty, String originalDateValue, String expectedDateValue) - { - List columns = buildColumnList(columnName); - Map> dateFieldFormatMap = buildDateFieldFormatMap(columnName, dateFormatProperty); - - Map rowSource = new HashMap<>(); - rowSource.put(columnName, originalDateValue); - - DateFieldFormatter dateFieldFormatter = new DateFieldFormatter(dateFieldFormatMap, columns, new HashMap<>()); - executeFormattingAndCompare(dateFieldFormatter, rowSource, columnName, expectedDateValue); - } - - private void executeFormattingAndCompare( - DateFieldFormatter formatter, - Map rowSource, - String columnToCheck, - String expectedDateValue) { - formatter.applyJDBCDateFormat(rowSource); - assertEquals(expectedDateValue, rowSource.get(columnToCheck)); - } - - private List buildColumnList(String columnName) { - 
return ImmutableList.builder() - .add(new Schema.Column(columnName, null, Schema.Type.DATE)) - .build(); - } - - private Map> buildDateFieldFormatMap(String columnName, String dateFormatProperty) { - return ImmutableMap.>builder() - .put(columnName, Arrays.asList(dateFormatProperty.split("\\|\\|"))) - .build(); - } + @Test + public void testOpenSearchDashboardsSampleDataEcommerceOrderDateField() { + String columnName = "order_date"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-24T09:28:48+00:00"; + String expectedDateValue = "2020-02-24 09:28:48.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOpenSearchDashboardsSampleDataFlightsTimestampField() { + String columnName = "timestamp"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-03T00:00:00"; + String expectedDateValue = "2020-02-03 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOpenSearchDashboardsSampleDataFlightsTimestampFieldNoTime() { + String columnName = "timestamp"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-03T"; + String expectedDateValue = "2020-02-03 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOpenSearchDashboardsSampleDataLogsUtcDateField() { + String columnName = "utc_date"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-02T00:39:02.912Z"; + String expectedDateValue = "2020-02-02 00:39:02.912"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testEpochMillis() { + String columnName = "date_field"; + String dateFormat = "epoch_millis"; + String originalDateValue = "727430805000"; + String expectedDateValue = "1993-01-19 08:06:45.000"; + + 
verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testEpochSecond() { + String columnName = "date_field"; + String dateFormat = "epoch_second"; + String originalDateValue = "727430805"; + String expectedDateValue = "1993-01-19 08:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateOptionalTimeDateOnly() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateOptionalTimeDateAndTime() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = "1993-01-19T00:06:45.123-0800"; + String expectedDateValue = "1993-01-19 08:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicDate() { + String columnName = "date_field"; + String dateFormat = "basic_date"; + String originalDateValue = "19930119"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicDateTime() { + String columnName = "date_field"; + String dateFormat = "basic_date_time"; + String originalDateValue = "19930119T120645.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_date_time_no_millis"; + String originalDateValue = "19930119T120645-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, 
expectedDateValue); + } + + @Test + public void testBasicOrdinalDate() { + String columnName = "date_field"; + String dateFormat = "basic_ordinal_date"; + String originalDateValue = "1993019"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicOrdinalDateTime() { + String columnName = "date_field"; + String dateFormat = "basic_ordinal_date_time"; + String originalDateValue = "1993019T120645.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicOrdinalDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_ordinal_date_time_no_millis"; + String originalDateValue = "1993019T120645-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicTime() { + String columnName = "date_field"; + String dateFormat = "basic_time"; + String originalDateValue = "120645.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_time_no_millis"; + String originalDateValue = "120645-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicTTime() { + String columnName = "date_field"; + String dateFormat = "basic_t_time"; + String originalDateValue = "T120645.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void 
testBasicTTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_t_time_no_millis"; + String originalDateValue = "T120645-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicWeekDate() { + String columnName = "date_field"; + String dateFormat = "basic_week_date"; + String originalDateValue = "1993W042"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicWeekDateTime() { + String columnName = "date_field"; + String dateFormat = "basic_week_date_time"; + String originalDateValue = "1993W042T120645.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicWeekDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_week_date_time_no_millis"; + String originalDateValue = "1993W042T120645-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDate() { + String columnName = "date_field"; + String dateFormat = "date"; + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHour() { + String columnName = "date_field"; + String dateFormat = "date_hour"; + String originalDateValue = "1993-01-19T12"; + String expectedDateValue = "1993-01-19 12:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinute() { + String columnName = "date_field"; + String dateFormat = 
"date_hour_minute"; + String originalDateValue = "1993-01-19T12:06"; + String expectedDateValue = "1993-01-19 12:06:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinuteSecond() { + String columnName = "date_field"; + String dateFormat = "date_hour_minute_second"; + String originalDateValue = "1993-01-19T12:06:45"; + String expectedDateValue = "1993-01-19 12:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinuteSecondFraction() { + String columnName = "date_field"; + String dateFormat = "date_hour_minute_second_fraction"; + String originalDateValue = "1993-01-19T12:06:45.123"; + String expectedDateValue = "1993-01-19 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinuteSecondMillis() { + String columnName = "date_field"; + String dateFormat = "date_hour_minute_second_millis"; + String originalDateValue = "1993-01-19T12:06:45.123"; + String expectedDateValue = "1993-01-19 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateTime() { + String columnName = "date_field"; + String dateFormat = "date_time"; + String originalDateValue = "1993-01-19T12:06:45.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "date_time_no_millis"; + String originalDateValue = "1993-01-19T12:06:45-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHour() { + String columnName = "date_field"; + String dateFormat = 
"hour"; + String originalDateValue = "12"; + String expectedDateValue = "1970-01-01 12:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinute() { + String columnName = "date_field"; + String dateFormat = "hour_minute"; + String originalDateValue = "12:06"; + String expectedDateValue = "1970-01-01 12:06:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinuteSecond() { + String columnName = "date_field"; + String dateFormat = "hour_minute_second"; + String originalDateValue = "12:06:45"; + String expectedDateValue = "1970-01-01 12:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinuteSecondFraction() { + String columnName = "date_field"; + String dateFormat = "hour_minute_second_fraction"; + String originalDateValue = "12:06:45.123"; + String expectedDateValue = "1970-01-01 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinuteSecondMillis() { + String columnName = "date_field"; + String dateFormat = "hour_minute_second_millis"; + String originalDateValue = "12:06:45.123"; + String expectedDateValue = "1970-01-01 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOrdinalDate() { + String columnName = "date_field"; + String dateFormat = "ordinal_date"; + String originalDateValue = "1993-019"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOrdinalDateTime() { + String columnName = "date_field"; + String dateFormat = "ordinal_date_time"; + String originalDateValue = "1993-019T12:06:45.123-0800"; + String expectedDateValue = "1993-01-19 
20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOrdinalDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "ordinal_date_time_no_millis"; + String originalDateValue = "1993-019T12:06:45-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTime() { + String columnName = "date_field"; + String dateFormat = "time"; + String originalDateValue = "12:06:45.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "time_no_millis"; + String originalDateValue = "12:06:45-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTTime() { + String columnName = "date_field"; + String dateFormat = "t_time"; + String originalDateValue = "T12:06:45.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "t_time_no_millis"; + String originalDateValue = "T12:06:45-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekDate() { + String columnName = "date_field"; + String dateFormat = "week_date"; + String originalDateValue = "1993-W04-2"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void 
testWeekDateTime() { + String columnName = "date_field"; + String dateFormat = "week_date_time"; + String originalDateValue = "1993-W04-2T12:06:45.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "week_date_time_no_millis"; + String originalDateValue = "1993-W04-2T12:06:45-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekyear() { + String columnName = "date_field"; + String dateFormat = "week_year"; + String originalDateValue = "1993"; + String expectedDateValue = "1993-01-01 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekyearWeek() { + String columnName = "date_field"; + String dateFormat = "weekyear_week"; + String originalDateValue = "1993-W04"; + String expectedDateValue = "1993-01-17 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekyearWeekDay() { + String columnName = "date_field"; + String dateFormat = "weekyear_week_day"; + String originalDateValue = "1993-W04-2"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testYear() { + String columnName = "date_field"; + String dateFormat = "year"; + String originalDateValue = "1993"; + String expectedDateValue = "1993-01-01 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testYearMonth() { + String columnName = "date_field"; + String dateFormat = "year_month"; + String originalDateValue = "1993-01"; + 
String expectedDateValue = "1993-01-01 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testYearMonthDay() { + String columnName = "date_field"; + String dateFormat = "year_month_day"; + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testCustomFormat() { + String columnName = "date_field"; + String dateFormat = "EEE, MMM d, ''yy"; + + String originalDateValue = "Tue, Jan 19, '93"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testMultipleFormats() { + String columnName = "date_field"; + String dateFormat = "date_optional_time||epoch_millis"; + + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "727401600000"; + expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testMultipleCustomFormats() { + String columnName = "date_field"; + String dateFormat = "EEE, MMM d, ''yy||yyMMddHHmmssZ"; + + String originalDateValue = "Tue, Jan 19, '93"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "930119000000-0000"; + expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testNamedAndCustomFormats() { + String columnName = "date_field"; + String dateFormat = "EEE, MMM d, ''yy||hour_minute_second"; + + String originalDateValue = "Tue, Jan 19, '93"; + String 
expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "12:06:45"; + expectedDateValue = "1970-01-01 12:06:45.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testIncorrectFormat() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = "1581724085"; + // Invalid format for date value; should return original value + String expectedDateValue = "1581724085"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testNullDateData() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = null; + // Nulls should be preserved + String expectedDateValue = null; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testStrictDateOptionalTimeOrEpochMillsShouldPass() { + String columnName = "date_field"; + String dateFormat = "strict_date_optional_time||epoch_millis"; + + String originalDateValue = "2015-01-01"; + String expectedDateValue = "2015-01-01 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "2015-01-01T12:10:30Z"; + expectedDateValue = "2015-01-01 12:10:30.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "1420070400001"; + expectedDateValue = "2015-01-01 00:00:00.001"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + private void verifyFormatting( + String columnName, + String dateFormatProperty, + String originalDateValue, + String expectedDateValue) { + List columns = buildColumnList(columnName); + Map> dateFieldFormatMap = + buildDateFieldFormatMap(columnName, dateFormatProperty); + + Map rowSource = new 
HashMap<>(); + rowSource.put(columnName, originalDateValue); + + DateFieldFormatter dateFieldFormatter = + new DateFieldFormatter(dateFieldFormatMap, columns, new HashMap<>()); + executeFormattingAndCompare(dateFieldFormatter, rowSource, columnName, expectedDateValue); + } + + private void executeFormattingAndCompare( + DateFieldFormatter formatter, + Map rowSource, + String columnToCheck, + String expectedDateValue) { + formatter.applyJDBCDateFormat(rowSource); + assertEquals(expectedDateValue, rowSource.get(columnToCheck)); + } + + private List buildColumnList(String columnName) { + return ImmutableList.builder() + .add(new Schema.Column(columnName, null, Schema.Type.DATE)) + .build(); + } + + private Map> buildDateFieldFormatMap( + String columnName, String dateFormatProperty) { + return ImmutableMap.>builder() + .put(columnName, Arrays.asList(dateFormatProperty.split("\\|\\|"))) + .build(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java index e5f44eacf0..526642e8ea 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; @@ -17,55 +16,53 @@ import org.opensearch.sql.legacy.parser.SqlParser; import org.opensearch.sql.legacy.util.SqlParserUtils; -/** - * Unit test class for feature of aggregation options: DISTINCT, ALL, UNIQUE, DEDUPLICATION - */ +/** Unit test class for feature of aggregation options: DISTINCT, ALL, UNIQUE, DEDUPLICATION */ public class AggregationOptionTest { - @Test - public void selectDistinctFieldsShouldHaveAggregationOption() { - List fields = getSelectFields("SELECT DISTINCT gender, city FROM accounts"); - for (Field 
field: fields) { - Assert.assertEquals(field.getOption(), SQLAggregateOption.DISTINCT); - } + @Test + public void selectDistinctFieldsShouldHaveAggregationOption() { + List fields = getSelectFields("SELECT DISTINCT gender, city FROM accounts"); + for (Field field : fields) { + Assert.assertEquals(field.getOption(), SQLAggregateOption.DISTINCT); } + } - @Test - public void selectWithoutDistinctFieldsShouldNotHaveAggregationOption() { - List fields = getSelectFields("SELECT gender, city FROM accounts"); - for (Field field: fields) { - Assert.assertNull(field.getOption()); - } + @Test + public void selectWithoutDistinctFieldsShouldNotHaveAggregationOption() { + List fields = getSelectFields("SELECT gender, city FROM accounts"); + for (Field field : fields) { + Assert.assertNull(field.getOption()); } + } - @Test - public void selectDistinctWithoutGroupByShouldHaveGroupByItems() { - List> groupBys = getGroupBys("SELECT DISTINCT gender, city FROM accounts"); - Assert.assertFalse(groupBys.isEmpty()); - } + @Test + public void selectDistinctWithoutGroupByShouldHaveGroupByItems() { + List> groupBys = getGroupBys("SELECT DISTINCT gender, city FROM accounts"); + Assert.assertFalse(groupBys.isEmpty()); + } - @Test - public void selectWithoutDistinctWithoutGroupByShouldNotHaveGroupByItems() { - List> groupBys = getGroupBys("SELECT gender, city FROM accounts"); - Assert.assertTrue(groupBys.isEmpty()); - } + @Test + public void selectWithoutDistinctWithoutGroupByShouldNotHaveGroupByItems() { + List> groupBys = getGroupBys("SELECT gender, city FROM accounts"); + Assert.assertTrue(groupBys.isEmpty()); + } - private List> getGroupBys(String query) { - return getSelect(query).getGroupBys(); - } + private List> getGroupBys(String query) { + return getSelect(query).getGroupBys(); + } - private List getSelectFields(String query) { - return getSelect(query).getFields(); - } + private List getSelectFields(String query) { + return getSelect(query).getFields(); + } - private Select 
getSelect(String query) { - SQLQueryExpr queryExpr = SqlParserUtils.parse(query); - Select select = null; - try { - select = new SqlParser().parseSelect(queryExpr); - } catch (SqlParseException e) { - e.printStackTrace(); - } - return select; + private Select getSelect(String query) { + SQLQueryExpr queryExpr = SqlParserUtils.parse(query); + Select select = null; + try { + select = new SqlParser().parseSelect(queryExpr); + } catch (SqlParseException e) { + e.printStackTrace(); } + return select; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java index 89ac8b4563..3bb7b4a2b6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -40,196 +39,238 @@ public class DateFormatTest { - private static final String SELECT_CNT_FROM_DATE = "SELECT COUNT(*) AS c FROM dates "; - - @Test - public void simpleFormatCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY') < '2018'"); - - assertThat(q, hasQueryWithValue("fieldName", equalTo("creationDate"))); - assertThat(q, hasQueryWithValueGetter(MatcherUtils.featureValueOf("has format", equalTo("YYYY"), f->((RangeQueryBuilder)f).format()))); - } - - @Test - public void equalCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') = '2018-04-02'"); - - assertThat(q, hasQueryWithValueGetter(MatcherUtils.featureValueOf("has format", equalTo("YYYY-MM-dd"), f->((RangeQueryBuilder)f).format()))); - - // Equality query for date_format is created with a rangeQuery where the 'from' and 'to' values are equal to the value we are equating to - assertThat(q, 
hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); // converting string to bytes ref as RangeQueryBuilder stores it this way - assertThat(q, hasQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - } - - @Test - public void orderByTest() { - String query = "SELECT agent, ip, date_format(utc_time, 'dd-MM-YYYY') date " + - "FROM opensearch_dashboards_sample_data_logs " + - "ORDER BY date_format(utc_time, 'dd-MM-YYYY') desc, ip"; - - Select select = getSelect(query); - - List orderBys = select.getOrderBys(); - assertThat(orderBys.size(), equalTo(2)); - - Order formula = orderBys.get(0); - - assertThat(formula.isScript(), is(true)); - assertThat(formula.getType(), is("DESC")); - assertThat(formula.getName(), containsString("DateTimeFormatter.ofPattern")); - - Order ip = orderBys.get(1); - - assertThat(ip.isScript(), is(false)); - assertThat(ip.getName(), is("ip")); - assertThat(ip.getType(), is("ASC")); - } - - @Test - public void groupByWithDescOrder() throws SqlParseException { - String query = "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + - "ORDER BY date_format(utc_time, 'dd-MM-YYYY') DESC"; - - JSONObject aggregation = getAggregation(query); - assertThat(aggregation.getInt("size"), is(getSelect(query).getRowCount())); - assertThat(aggregation.getJSONObject("order").getString("_key"), is("desc")); - } - - @Test - public void groupByWithAscOrder() throws SqlParseException { - String query = "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + - "ORDER BY date_format(utc_time, 'dd-MM-YYYY')"; - - JSONObject aggregation = getAggregation(query); - - assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); - } - - @Test - @Ignore("https://github.com/opendistro-for-elasticsearch/sql/issues/158") 
- public void groupByWithAndAlias() throws SqlParseException { - String query = "SELECT date_format(utc_time, 'dd-MM-YYYY') x, count(*) " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY x " + - "ORDER BY x"; - - JSONObject aggregation = getAggregation(query); - assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); - } - - public JSONObject getAggregation(String query) throws SqlParseException { - Select select = getSelect(query); - - Client client = mock(Client.class); - AggregationQueryAction queryAction = new AggregationQueryAction(client, select); - - String elasticDsl = queryAction.explain().explain(); - JSONObject elasticQuery = new JSONObject(elasticDsl); - - JSONObject aggregations = elasticQuery.getJSONObject("aggregations"); - String dateFormatAggregationKey = getScriptAggregationKey(aggregations, "date_format"); - - return aggregations.getJSONObject(dateFormatAggregationKey).getJSONObject("terms"); - } - - public static String getScriptAggregationKey(JSONObject aggregation, String prefix) { - return aggregation.keySet() - .stream() - .filter(x -> x.startsWith(prefix)) - .findFirst() - .orElseThrow(()-> new RuntimeException("Can't find key" + prefix + " in aggregation " + aggregation)); - } - - @Test - public void notEqualCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') <> '2018-04-02'"); - - assertThat(q, hasNotQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - assertThat(q, hasNotQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - } - - @Test - public void greaterThanCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') > '2018-04-02'"); - - assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - assertThat(q, hasQueryWithValue("includeLower", equalTo(false))); - assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); - } - - @Test - 
public void greaterThanOrEqualToCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') >= '2018-04-02'"); - - assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - assertThat(q, hasQueryWithValue("to", equalTo(null))); - assertThat(q, hasQueryWithValue("includeLower", equalTo(true))); - assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); + private static final String SELECT_CNT_FROM_DATE = "SELECT COUNT(*) AS c FROM dates "; + + @Test + public void simpleFormatCondition() { + List q = + query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY') < '2018'"); + + assertThat(q, hasQueryWithValue("fieldName", equalTo("creationDate"))); + assertThat( + q, + hasQueryWithValueGetter( + MatcherUtils.featureValueOf( + "has format", equalTo("YYYY"), f -> ((RangeQueryBuilder) f).format()))); + } + + @Test + public void equalCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') = '2018-04-02'"); + + assertThat( + q, + hasQueryWithValueGetter( + MatcherUtils.featureValueOf( + "has format", equalTo("YYYY-MM-dd"), f -> ((RangeQueryBuilder) f).format()))); + + // Equality query for date_format is created with a rangeQuery where the 'from' and 'to' values + // are equal to the value we are equating to + assertThat( + q, + hasQueryWithValue( + "from", + equalTo( + BytesRefs.toBytesRef( + "2018-04-02")))); // converting string to bytes ref as RangeQueryBuilder stores + // it this way + assertThat(q, hasQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + } + + @Test + public void orderByTest() { + String query = + "SELECT agent, ip, date_format(utc_time, 'dd-MM-YYYY') date " + + "FROM opensearch_dashboards_sample_data_logs " + + "ORDER BY date_format(utc_time, 'dd-MM-YYYY') desc, ip"; + + Select select = getSelect(query); + + List orderBys = select.getOrderBys(); + assertThat(orderBys.size(), equalTo(2)); + + 
Order formula = orderBys.get(0); + + assertThat(formula.isScript(), is(true)); + assertThat(formula.getType(), is("DESC")); + assertThat(formula.getName(), containsString("DateTimeFormatter.ofPattern")); + + Order ip = orderBys.get(1); + + assertThat(ip.isScript(), is(false)); + assertThat(ip.getName(), is("ip")); + assertThat(ip.getType(), is("ASC")); + } + + @Test + public void groupByWithDescOrder() throws SqlParseException { + String query = + "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + + "ORDER BY date_format(utc_time, 'dd-MM-YYYY') DESC"; + + JSONObject aggregation = getAggregation(query); + assertThat(aggregation.getInt("size"), is(getSelect(query).getRowCount())); + assertThat(aggregation.getJSONObject("order").getString("_key"), is("desc")); + } + + @Test + public void groupByWithAscOrder() throws SqlParseException { + String query = + "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + + "ORDER BY date_format(utc_time, 'dd-MM-YYYY')"; + + JSONObject aggregation = getAggregation(query); + + assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); + } + + @Test + @Ignore("https://github.com/opendistro-for-elasticsearch/sql/issues/158") + public void groupByWithAndAlias() throws SqlParseException { + String query = + "SELECT date_format(utc_time, 'dd-MM-YYYY') x, count(*) " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY x " + + "ORDER BY x"; + + JSONObject aggregation = getAggregation(query); + assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); + } + + public JSONObject getAggregation(String query) throws SqlParseException { + Select select = getSelect(query); + + Client client = mock(Client.class); + AggregationQueryAction queryAction = new AggregationQueryAction(client, 
select); + + String elasticDsl = queryAction.explain().explain(); + JSONObject elasticQuery = new JSONObject(elasticDsl); + + JSONObject aggregations = elasticQuery.getJSONObject("aggregations"); + String dateFormatAggregationKey = getScriptAggregationKey(aggregations, "date_format"); + + return aggregations.getJSONObject(dateFormatAggregationKey).getJSONObject("terms"); + } + + public static String getScriptAggregationKey(JSONObject aggregation, String prefix) { + return aggregation.keySet().stream() + .filter(x -> x.startsWith(prefix)) + .findFirst() + .orElseThrow( + () -> + new RuntimeException("Can't find key" + prefix + " in aggregation " + aggregation)); + } + + @Test + public void notEqualCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') <> '2018-04-02'"); + + assertThat(q, hasNotQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + assertThat(q, hasNotQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + } + + @Test + public void greaterThanCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') > '2018-04-02'"); + + assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + assertThat(q, hasQueryWithValue("includeLower", equalTo(false))); + assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); + } + + @Test + public void greaterThanOrEqualToCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') >= '2018-04-02'"); + + assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + assertThat(q, hasQueryWithValue("to", equalTo(null))); + assertThat(q, hasQueryWithValue("includeLower", equalTo(true))); + assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); + } + + @Test + public void timeZoneCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + + "WHERE date_format(creationDate, 
'YYYY-MM-dd', 'America/Phoenix') >" + + " '2018-04-02'"); + + // Used hasProperty here as getter followed convention for obtaining ID and Feature Matcher was + // having issues with generic type to obtain value + assertThat(q, hasQueryWithValue("timeZone", hasProperty("id", equalTo("America/Phoenix")))); + } + + private List query(String sql) { + return translate(parseSql(sql)); + } + + private List translate(SQLQueryExpr expr) { + try { + Select select = new SqlParser().parseSelect(expr); + QueryBuilder whereQuery = QueryMaker.explain(select.getWhere(), select.isQuery); + return ((BoolQueryBuilder) whereQuery).filter(); + } catch (SqlParseException e) { + throw new ParserException("Illegal sql expr: " + expr.toString()); } + } - @Test - public void timeZoneCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd', 'America/Phoenix') > '2018-04-02'"); - - // Used hasProperty here as getter followed convention for obtaining ID and Feature Matcher was having issues with generic type to obtain value - assertThat(q, hasQueryWithValue("timeZone", hasProperty("id", equalTo("America/Phoenix")))); - } - - private List query(String sql) { - return translate(parseSql(sql)); - } - - private List translate(SQLQueryExpr expr) { - try { - Select select = new SqlParser().parseSelect(expr); - QueryBuilder whereQuery = QueryMaker.explain(select.getWhere(), select.isQuery); - return ((BoolQueryBuilder) whereQuery).filter(); - } catch (SqlParseException e) { - throw new ParserException("Illegal sql expr: " + expr.toString()); - } - } - - private SQLQueryExpr parseSql(String sql) { - ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("Illegal sql: " + sql); - } - return (SQLQueryExpr) expr; + private SQLQueryExpr parseSql(String sql) { + ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = 
parser.expr(); + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("Illegal sql: " + sql); } - - private Select getSelect(String query) { - try { - Select select = new SqlParser().parseSelect(parseSql(query)); - if (select.getRowCount() == null){ - select.setRowCount(Select.DEFAULT_LIMIT); - } - return select; - } catch (SqlParseException e) { - throw new RuntimeException(e); - } - } - - private Matcher> hasQueryWithValue(String name, Matcher matcher) { - return hasItem( - hasFieldWithValue("mustClauses", "has mustClauses", - hasItem(hasFieldWithValue(name, "has " + name, matcher)))); - } - - private Matcher> hasNotQueryWithValue(String name, Matcher matcher) { - return hasItem( - hasFieldWithValue("mustClauses", "has mustClauses", - hasItem(hasFieldWithValue("mustNotClauses", "has mustNotClauses", - hasItem(hasFieldWithValue(name, "has " + name, matcher)))))); - } - - private Matcher> hasQueryWithValueGetter(Matcher matcher) { - return hasItem( - hasFieldWithValue("mustClauses", "has mustClauses", - hasItem(matcher))); + return (SQLQueryExpr) expr; + } + + private Select getSelect(String query) { + try { + Select select = new SqlParser().parseSelect(parseSql(query)); + if (select.getRowCount() == null) { + select.setRowCount(Select.DEFAULT_LIMIT); + } + return select; + } catch (SqlParseException e) { + throw new RuntimeException(e); } + } + + private Matcher> hasQueryWithValue( + String name, Matcher matcher) { + return hasItem( + hasFieldWithValue( + "mustClauses", + "has mustClauses", + hasItem(hasFieldWithValue(name, "has " + name, matcher)))); + } + + private Matcher> hasNotQueryWithValue( + String name, Matcher matcher) { + return hasItem( + hasFieldWithValue( + "mustClauses", + "has mustClauses", + hasItem( + hasFieldWithValue( + "mustNotClauses", + "has mustNotClauses", + hasItem(hasFieldWithValue(name, "has " + name, matcher)))))); + } + + private Matcher> hasQueryWithValueGetter(Matcher matcher) { + return 
hasItem(hasFieldWithValue("mustClauses", "has mustClauses", hasItem(matcher))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java index 771b0ce1bf..cf1be90665 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertTrue; @@ -20,181 +19,132 @@ public class DateFunctionsTest { - private static SqlParser parser; - - @BeforeClass - public static void init() { parser = new SqlParser(); } - - /** - * The following unit tests will only cover a subset of the available date functions as the painless script is - * generated from the same template. More thorough testing will be done in integration tests since output will - * differ for each function. 
- */ - - @Test - public void yearInSelect() { - String query = "SELECT YEAR(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.year")); - } - - @Test - public void yearInWhere() { - String query = "SELECT * " + - "FROM dates " + - "WHERE YEAR(creationDate) > 2012"; - ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); - assertTrue( - scriptContainsString( - scriptFilter, - "doc['creationDate'].value.year")); - assertTrue( - scriptHasPattern( - scriptFilter, - "year_\\d+ > 2012")); - } - - @Test - public void weekOfYearInSelect() { - String query = "SELECT WEEK_OF_YEAR(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); - } - - @Test - public void weekOfYearInWhere() { - String query = "SELECT * " + - "FROM dates " + - "WHERE WEEK_OF_YEAR(creationDate) > 15"; - ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); - assertTrue( - scriptContainsString( - scriptFilter, - "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); - assertTrue( - scriptHasPattern( - scriptFilter, - "weekOfWeekyear_\\d+ > 15")); - } - - @Test - public void dayOfMonth() { - String query = "SELECT DAY_OF_MONTH(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.dayOfMonth")); - } - - @Test - public void hourOfDay() { - String query = "SELECT HOUR_OF_DAY(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.hour")); - } - - @Test - public void secondOfMinute() { - String query = "SELECT 
SECOND_OF_MINUTE(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.second")); - } - - @Test - public void month() { - String query = "SELECT MONTH(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.monthValue")); - } - - @Test - public void dayofmonth() { - String query = "SELECT DAY_OF_MONTH(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.dayOfMonth")); - } - - @Test - public void date() { - String query = "SELECT DATE(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "LocalDate.parse(doc['creationDate'].value.toString(),DateTimeFormatter.ISO_DATE_TIME)")); - } - - @Test - public void monthname() { - String query = "SELECT MONTHNAME(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.month")); - } - - @Test - public void timestamp() { - String query = "SELECT TIMESTAMP(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss')")); - } - - @Test - public void maketime() { - String query = "SELECT MAKETIME(1, 1, 1) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "LocalTime.of(1, 1, 1).format(DateTimeFormatter.ofPattern('HH:mm:ss'))")); - } - - @Test - public void now() { - String query = "SELECT NOW() FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - 
scriptContainsString( - scriptField, - "System.currentTimeMillis()")); - } - - @Test - public void curdate() { - String query = "SELECT CURDATE() FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "System.currentTimeMillis()")); - } + private static SqlParser parser; + + @BeforeClass + public static void init() { + parser = new SqlParser(); + } + + /** + * The following unit tests will only cover a subset of the available date functions as the + * painless script is generated from the same template. More thorough testing will be done in + * integration tests since output will differ for each function. + */ + @Test + public void yearInSelect() { + String query = "SELECT YEAR(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.year")); + } + + @Test + public void yearInWhere() { + String query = "SELECT * " + "FROM dates " + "WHERE YEAR(creationDate) > 2012"; + ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); + assertTrue(scriptContainsString(scriptFilter, "doc['creationDate'].value.year")); + assertTrue(scriptHasPattern(scriptFilter, "year_\\d+ > 2012")); + } + + @Test + public void weekOfYearInSelect() { + String query = "SELECT WEEK_OF_YEAR(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue( + scriptContainsString( + scriptField, "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); + } + + @Test + public void weekOfYearInWhere() { + String query = "SELECT * " + "FROM dates " + "WHERE WEEK_OF_YEAR(creationDate) > 15"; + ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); + assertTrue( + scriptContainsString( + scriptFilter, "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); + assertTrue(scriptHasPattern(scriptFilter, "weekOfWeekyear_\\d+ > 15")); 
+ } + + @Test + public void dayOfMonth() { + String query = "SELECT DAY_OF_MONTH(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.dayOfMonth")); + } + + @Test + public void hourOfDay() { + String query = "SELECT HOUR_OF_DAY(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.hour")); + } + + @Test + public void secondOfMinute() { + String query = "SELECT SECOND_OF_MINUTE(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.second")); + } + + @Test + public void month() { + String query = "SELECT MONTH(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.monthValue")); + } + + @Test + public void dayofmonth() { + String query = "SELECT DAY_OF_MONTH(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.dayOfMonth")); + } + + @Test + public void date() { + String query = "SELECT DATE(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue( + scriptContainsString( + scriptField, + "LocalDate.parse(doc['creationDate'].value.toString(),DateTimeFormatter.ISO_DATE_TIME)")); + } + + @Test + public void monthname() { + String query = "SELECT MONTHNAME(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.month")); + } + + @Test + public void timestamp() { + String query = "SELECT TIMESTAMP(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + 
assertTrue( + scriptContainsString(scriptField, "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss')")); + } + + @Test + public void maketime() { + String query = "SELECT MAKETIME(1, 1, 1) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue( + scriptContainsString( + scriptField, "LocalTime.of(1, 1, 1).format(DateTimeFormatter.ofPattern('HH:mm:ss'))")); + } + + @Test + public void now() { + String query = "SELECT NOW() FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "System.currentTimeMillis()")); + } + + @Test + public void curdate() { + String query = "SELECT CURDATE() FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "System.currentTimeMillis()")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java index d1e032ba1c..1b9662035d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.cursor; import static org.hamcrest.MatcherAssert.assertThat; @@ -19,40 +18,39 @@ public class DefaultCursorTest { - @Test - public void checkCursorType() { - DefaultCursor cursor = new DefaultCursor(); - assertEquals(cursor.getType(), CursorType.DEFAULT); - } - - - @Test - public void cursorShouldStartWithCursorTypeID() { - DefaultCursor cursor = new DefaultCursor(); - cursor.setRowsLeft(50); - cursor.setScrollId("dbdskbcdjksbcjkdsbcjk+//"); - cursor.setIndexPattern("myIndex"); - cursor.setFetchSize(500); - cursor.setFieldAliasMap(Collections.emptyMap()); - cursor.setColumns(new ArrayList<>()); - assertThat(cursor.generateCursorId(), 
startsWith(cursor.getType().getId()+ ":") ); - } - - @Test - public void nullCursorWhenRowLeftIsLessThanEqualZero() { - DefaultCursor cursor = new DefaultCursor(); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - - cursor.setRowsLeft(-10); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - } - - @Test - public void nullCursorWhenScrollIDIsNullOrEmpty() { - DefaultCursor cursor = new DefaultCursor(); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - - cursor.setScrollId(""); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - } + @Test + public void checkCursorType() { + DefaultCursor cursor = new DefaultCursor(); + assertEquals(cursor.getType(), CursorType.DEFAULT); + } + + @Test + public void cursorShouldStartWithCursorTypeID() { + DefaultCursor cursor = new DefaultCursor(); + cursor.setRowsLeft(50); + cursor.setScrollId("dbdskbcdjksbcjkdsbcjk+//"); + cursor.setIndexPattern("myIndex"); + cursor.setFetchSize(500); + cursor.setFieldAliasMap(Collections.emptyMap()); + cursor.setColumns(new ArrayList<>()); + assertThat(cursor.generateCursorId(), startsWith(cursor.getType().getId() + ":")); + } + + @Test + public void nullCursorWhenRowLeftIsLessThanEqualZero() { + DefaultCursor cursor = new DefaultCursor(); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + + cursor.setRowsLeft(-10); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + } + + @Test + public void nullCursorWhenScrollIDIsNullOrEmpty() { + DefaultCursor cursor = new DefaultCursor(); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + + cursor.setScrollId(""); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java index 205c63ad1d..6599d576b3 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.domain; import static org.junit.Assert.assertEquals; @@ -18,28 +17,29 @@ import org.opensearch.sql.legacy.executor.format.Schema; public class ColumnTypeProviderTest { - @Test - public void singleESDataTypeShouldReturnCorrectSchemaType() { - assertEquals(Schema.Type.LONG, new ColumnTypeProvider(OpenSearchDataType.LONG).get(0)); - } - - @Test - public void productTypeShouldReturnCorrectSchemaType() { - ColumnTypeProvider columnTypeProvider = - new ColumnTypeProvider(new Product(ImmutableList.of(OpenSearchDataType.LONG, OpenSearchDataType.SHORT))); - assertEquals(Schema.Type.LONG, columnTypeProvider.get(0)); - assertEquals(Schema.Type.SHORT, columnTypeProvider.get(1)); - } - - @Test - public void unSupportedTypeShouldReturnDefaultSchemaType() { - ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(SetOperator.UNION); - assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); - } - - @Test - public void providerWithoutColumnTypeShouldReturnDefaultSchemaType() { - ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(); - assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); - } + @Test + public void singleESDataTypeShouldReturnCorrectSchemaType() { + assertEquals(Schema.Type.LONG, new ColumnTypeProvider(OpenSearchDataType.LONG).get(0)); + } + + @Test + public void productTypeShouldReturnCorrectSchemaType() { + ColumnTypeProvider columnTypeProvider = + new ColumnTypeProvider( + new Product(ImmutableList.of(OpenSearchDataType.LONG, OpenSearchDataType.SHORT))); + assertEquals(Schema.Type.LONG, columnTypeProvider.get(0)); + assertEquals(Schema.Type.SHORT, columnTypeProvider.get(1)); + } + + @Test + public void 
unSupportedTypeShouldReturnDefaultSchemaType() { + ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(SetOperator.UNION); + assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); + } + + @Test + public void providerWithoutColumnTypeShouldReturnDefaultSchemaType() { + ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(); + assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java index a668e9248a..533c2b2989 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor; import static org.hamcrest.MatcherAssert.assertThat; @@ -23,53 +22,52 @@ import org.opensearch.sql.legacy.executor.format.DeleteResultSet; import org.opensearch.sql.legacy.executor.format.Schema; - public class DeleteResultSetTest { - @Mock - NodeClient client; - - @Mock - Delete deleteQuery; + @Mock NodeClient client; - @Test - public void testDeleteResponseForJdbcFormat() throws IOException { + @Mock Delete deleteQuery; - String jsonDeleteResponse = "{\n" + - " \"took\" : 73,\n" + - " \"timed_out\" : false,\n" + - " \"total\" : 1,\n" + - " \"updated\" : 0,\n" + - " \"created\" : 0,\n" + - " \"deleted\" : 10,\n" + - " \"batches\" : 1,\n" + - " \"version_conflicts\" : 0,\n" + - " \"noops\" : 0,\n" + - " \"retries\" : {\n" + - " \"bulk\" : 0,\n" + - " \"search\" : 0\n" + - " },\n" + - " \"throttled_millis\" : 0,\n" + - " \"requests_per_second\" : -1.0,\n" + - " \"throttled_until_millis\" : 0,\n" + - " \"failures\" : [ ]\n" + - "}\n"; + @Test + public void testDeleteResponseForJdbcFormat() throws IOException { - XContentType 
xContentType = XContentType.JSON; - XContentParser parser = xContentType.xContent().createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - jsonDeleteResponse - ); + String jsonDeleteResponse = + "{\n" + + " \"took\" : 73,\n" + + " \"timed_out\" : false,\n" + + " \"total\" : 1,\n" + + " \"updated\" : 0,\n" + + " \"created\" : 0,\n" + + " \"deleted\" : 10,\n" + + " \"batches\" : 1,\n" + + " \"version_conflicts\" : 0,\n" + + " \"noops\" : 0,\n" + + " \"retries\" : {\n" + + " \"bulk\" : 0,\n" + + " \"search\" : 0\n" + + " },\n" + + " \"throttled_millis\" : 0,\n" + + " \"requests_per_second\" : -1.0,\n" + + " \"throttled_until_millis\" : 0,\n" + + " \"failures\" : [ ]\n" + + "}\n"; - BulkByScrollResponse deleteResponse = BulkByScrollResponse.fromXContent(parser); - DeleteResultSet deleteResultSet = new DeleteResultSet(client, deleteQuery, deleteResponse); - Schema schema = deleteResultSet.getSchema(); - DataRows dataRows = deleteResultSet.getDataRows(); + XContentType xContentType = XContentType.JSON; + XContentParser parser = + xContentType + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + jsonDeleteResponse); - assertThat(schema.getHeaders().size(), equalTo(1)); - assertThat(dataRows.getSize(), equalTo(1L)); - assertThat(dataRows.iterator().next().getData(DeleteResultSet.DELETED), equalTo(10L)); - } + BulkByScrollResponse deleteResponse = BulkByScrollResponse.fromXContent(parser); + DeleteResultSet deleteResultSet = new DeleteResultSet(client, deleteQuery, deleteResponse); + Schema schema = deleteResultSet.getSchema(); + DataRows dataRows = deleteResultSet.getDataRows(); + assertThat(schema.getHeaders().size(), equalTo(1)); + assertThat(dataRows.getSize(), equalTo(1L)); + assertThat(dataRows.iterator().next().getData(DeleteResultSet.DELETED), equalTo(10L)); + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java index d76aa84a5d..fa385fa14b 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor.format; import static org.hamcrest.MatcherAssert.assertThat; @@ -27,53 +26,65 @@ public class BindingTupleResultSetTest { - @Test - public void buildDataRowsFromBindingTupleShouldPass() { - assertThat(row( + @Test + public void buildDataRowsFromBindingTupleShouldPass() { + assertThat( + row( Arrays.asList( ColumnNode.builder().name("age").type(Schema.Type.INTEGER).build(), ColumnNode.builder().name("gender").type(Schema.Type.TEXT).build()), - Arrays.asList(BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), + Arrays.asList( + BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), BindingTuple.from(ImmutableMap.of("age", 31, "gender", "f")), BindingTuple.from(ImmutableMap.of("age", 39, "gender", "m")), BindingTuple.from(ImmutableMap.of("age", 39, "gender", "f")))), - containsInAnyOrder(rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "m"))), - rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "f"))), - rowContents(allOf(hasEntry("age", 39), hasEntry("gender", (Object) "m"))), - rowContents(allOf(hasEntry("age", 39), hasEntry("gender", (Object) "f"))))); - } + containsInAnyOrder( + rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "m"))), + rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "f"))), + rowContents(allOf(hasEntry("age", 39), hasEntry("gender", (Object) "m"))), + rowContents(allOf(hasEntry("age", 39), 
hasEntry("gender", (Object) "f"))))); + } - @Test - public void buildDataRowsFromBindingTupleIncludeLongValueShouldPass() { - assertThat(row( + @Test + public void buildDataRowsFromBindingTupleIncludeLongValueShouldPass() { + assertThat( + row( Arrays.asList( ColumnNode.builder().name("longValue").type(Schema.Type.LONG).build(), ColumnNode.builder().name("gender").type(Schema.Type.TEXT).build()), Arrays.asList( BindingTuple.from(ImmutableMap.of("longValue", Long.MAX_VALUE, "gender", "m")), BindingTuple.from(ImmutableMap.of("longValue", Long.MIN_VALUE, "gender", "f")))), - containsInAnyOrder( - rowContents(allOf(hasEntry("longValue", Long.MAX_VALUE), hasEntry("gender", (Object) "m"))), - rowContents(allOf(hasEntry("longValue", Long.MIN_VALUE), hasEntry("gender", (Object) "f"))))); - } + containsInAnyOrder( + rowContents( + allOf(hasEntry("longValue", Long.MAX_VALUE), hasEntry("gender", (Object) "m"))), + rowContents( + allOf(hasEntry("longValue", Long.MIN_VALUE), hasEntry("gender", (Object) "f"))))); + } - @Test - public void buildDataRowsFromBindingTupleIncludeDateShouldPass() { - assertThat(row( + @Test + public void buildDataRowsFromBindingTupleIncludeDateShouldPass() { + assertThat( + row( Arrays.asList( ColumnNode.builder().alias("dateValue").type(Schema.Type.DATE).build(), ColumnNode.builder().alias("gender").type(Schema.Type.TEXT).build()), Arrays.asList( BindingTuple.from(ImmutableMap.of("dateValue", 1529712000000L, "gender", "m")))), - containsInAnyOrder( - rowContents(allOf(hasEntry("dateValue", "2018-06-23 00:00:00.000"), hasEntry("gender", (Object) "m"))))); - } + containsInAnyOrder( + rowContents( + allOf( + hasEntry("dateValue", "2018-06-23 00:00:00.000"), + hasEntry("gender", (Object) "m"))))); + } - private static Matcher rowContents(Matcher> matcher) { - return featureValueOf("DataRows.Row", matcher, DataRows.Row::getContents); - } + private static Matcher rowContents(Matcher> matcher) { + return featureValueOf("DataRows.Row", matcher, 
DataRows.Row::getContents); + } - private List row(List columnNodes, List bindingTupleList) { - return ImmutableList.copyOf(BindingTupleResultSet.buildDataRows(columnNodes, bindingTupleList).iterator()); - } + private List row( + List columnNodes, List bindingTupleList) { + return ImmutableList.copyOf( + BindingTupleResultSet.buildDataRows(columnNodes, bindingTupleList).iterator()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java index b3afff2ce1..be6029f9af 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor.format; import static org.hamcrest.MatcherAssert.assertThat; @@ -19,21 +18,25 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; public class CSVResultsExtractorTest { - private final CSVResultsExtractor csvResultsExtractor = new CSVResultsExtractor(false, false); - - @Test - public void extractResultsFromBindingTupleListShouldPass() throws CsvExtractorException { - CSVResult csvResult = csv(Arrays.asList(BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), - BindingTuple.from(ImmutableMap.of("age", 31, "gender", "f")), - BindingTuple.from(ImmutableMap.of("age", 39, "gender", "m")), - BindingTuple.from(ImmutableMap.of("age", 39, "gender", "f"))), - Arrays.asList("age", "gender")); - - assertThat(csvResult.getHeaders(), contains("age", "gender")); - assertThat(csvResult.getLines(), contains("31,m", "31,f", "39,m", "39,f")); - } - - private CSVResult csv(List bindingTupleList, List fieldNames) throws CsvExtractorException { - return csvResultsExtractor.extractResults(bindingTupleList, 
false, ",", fieldNames); - } + private final CSVResultsExtractor csvResultsExtractor = new CSVResultsExtractor(false, false); + + @Test + public void extractResultsFromBindingTupleListShouldPass() throws CsvExtractorException { + CSVResult csvResult = + csv( + Arrays.asList( + BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), + BindingTuple.from(ImmutableMap.of("age", 31, "gender", "f")), + BindingTuple.from(ImmutableMap.of("age", 39, "gender", "m")), + BindingTuple.from(ImmutableMap.of("age", 39, "gender", "f"))), + Arrays.asList("age", "gender")); + + assertThat(csvResult.getHeaders(), contains("age", "gender")); + assertThat(csvResult.getLines(), contains("31,m", "31,f", "39,m", "39,f")); + } + + private CSVResult csv(List bindingTupleList, List fieldNames) + throws CsvExtractorException { + return csvResultsExtractor.extractResults(bindingTupleList, false, ",", fieldNames); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java index 2f802f4f91..37a0666ad3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.junit.Assert.assertEquals; @@ -21,69 +20,65 @@ @RunWith(MockitoJUnitRunner.class) public class BinaryExpressionTest extends ExpressionTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void addIntegerValueShouldPass() { - assertEquals(2, - apply(ScalarOperation.ADD, ref("intValue"), ref("intValue"))); - } - - @Test - public void multipleAddIntegerValueShouldPass() { - assertEquals(3, - apply(ScalarOperation.ADD, ref("intValue"), - 
of(ScalarOperation.ADD, ref("intValue"), ref("intValue")))); - } - - @Test - public void addDoubleValueShouldPass() { - assertEquals(4d, - apply(ScalarOperation.ADD, ref("doubleValue"), ref("doubleValue"))); - } - - @Test - public void addDoubleAndIntegerShouldPass() { - assertEquals(3d, - apply(ScalarOperation.ADD, ref("doubleValue"), ref("intValue"))); - } - - @Test - public void divideIntegerValueShouldPass() { - assertEquals(0, - apply(ScalarOperation.DIVIDE, ref("intValue"), ref("intValue2"))); - } - - @Test - public void divideIntegerAndDoubleShouldPass() { - assertEquals(0.5d, - apply(ScalarOperation.DIVIDE, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void subtractIntAndDoubleShouldPass() { - assertEquals(-1d, - apply(ScalarOperation.SUBTRACT, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void multiplyIntAndDoubleShouldPass() { - assertEquals(2d, - apply(ScalarOperation.MULTIPLY, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void modulesIntAndDoubleShouldPass() { - assertEquals(1d, - apply(ScalarOperation.MODULES, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void addIntAndStringShouldPass() { - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unexpected operation type: ADD(INTEGER_VALUE, STRING_VALUE)"); - - assertEquals(2, apply(ScalarOperation.ADD, literal(integerValue(1)), literal(stringValue("stringValue")))); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void addIntegerValueShouldPass() { + assertEquals(2, apply(ScalarOperation.ADD, ref("intValue"), ref("intValue"))); + } + + @Test + public void multipleAddIntegerValueShouldPass() { + assertEquals( + 3, + apply( + ScalarOperation.ADD, + ref("intValue"), + of(ScalarOperation.ADD, ref("intValue"), ref("intValue")))); + } + + @Test + public void addDoubleValueShouldPass() { + assertEquals(4d, apply(ScalarOperation.ADD, ref("doubleValue"), ref("doubleValue"))); + 
} + + @Test + public void addDoubleAndIntegerShouldPass() { + assertEquals(3d, apply(ScalarOperation.ADD, ref("doubleValue"), ref("intValue"))); + } + + @Test + public void divideIntegerValueShouldPass() { + assertEquals(0, apply(ScalarOperation.DIVIDE, ref("intValue"), ref("intValue2"))); + } + + @Test + public void divideIntegerAndDoubleShouldPass() { + assertEquals(0.5d, apply(ScalarOperation.DIVIDE, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void subtractIntAndDoubleShouldPass() { + assertEquals(-1d, apply(ScalarOperation.SUBTRACT, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void multiplyIntAndDoubleShouldPass() { + assertEquals(2d, apply(ScalarOperation.MULTIPLY, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void modulesIntAndDoubleShouldPass() { + assertEquals(1d, apply(ScalarOperation.MODULES, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void addIntAndStringShouldPass() { + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("unexpected operation type: ADD(INTEGER_VALUE, STRING_VALUE)"); + + assertEquals( + 2, + apply(ScalarOperation.ADD, literal(integerValue(1)), literal(stringValue("stringValue")))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java index 2e75ee0c8b..3315024a13 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.junit.Assert.assertEquals; @@ -16,10 +15,12 @@ public class CompoundExpressionTest extends ExpressionTest { - @Test - public void absAndAddShouldPass() { - 
assertEquals(2.0d, apply(ScalarOperation.ABS, of(ScalarOperation.ADD, - literal(doubleValue(-1.0d)), - literal(integerValue(-1))))); - } + @Test + public void absAndAddShouldPass() { + assertEquals( + 2.0d, + apply( + ScalarOperation.ABS, + of(ScalarOperation.ADD, literal(doubleValue(-1.0d)), literal(integerValue(-1))))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java index ebe61109a7..34dc170a37 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -14,22 +13,21 @@ public class BasicCounterTest { - @Test - public void increment() { - BasicCounter counter = new BasicCounter(); - for (int i=0; i<5; ++i) { - counter.increment(); - } - - assertThat(counter.getValue(), equalTo(5L)); + @Test + public void increment() { + BasicCounter counter = new BasicCounter(); + for (int i = 0; i < 5; ++i) { + counter.increment(); } - @Test - public void incrementN() { - BasicCounter counter = new BasicCounter(); - counter.add(5); + assertThat(counter.getValue(), equalTo(5L)); + } - assertThat(counter.getValue(), equalTo(5L)); - } + @Test + public void incrementN() { + BasicCounter counter = new BasicCounter(); + counter.add(5); + assertThat(counter.getValue(), equalTo(5L)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java index 067143716d..c26740a04c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.parser; import static org.junit.Assert.assertEquals; @@ -16,46 +15,45 @@ public class BucketPathTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - private final Path agg1 = Path.getAggPath("projects@NESTED"); - private final Path agg2 = Path.getAggPath("projects@FILTERED"); - private final Path metric = Path.getMetricPath("c"); + private final Path agg1 = Path.getAggPath("projects@NESTED"); + private final Path agg2 = Path.getAggPath("projects@FILTERED"); + private final Path metric = Path.getMetricPath("c"); - @Test - public void bucketPath() { - BucketPath bucketPath = new BucketPath(); - bucketPath.add(metric); - bucketPath.add(agg2); - bucketPath.add(agg1); + @Test + public void bucketPath() { + BucketPath bucketPath = new BucketPath(); + bucketPath.add(metric); + bucketPath.add(agg2); + bucketPath.add(agg1); - assertEquals("projects@NESTED>projects@FILTERED.c", bucketPath.getBucketPath()); - } + assertEquals("projects@NESTED>projects@FILTERED.c", bucketPath.getBucketPath()); + } - @Test - public void bucketPathEmpty() { - BucketPath bucketPath = new BucketPath(); + @Test + public void bucketPathEmpty() { + BucketPath bucketPath = new BucketPath(); - assertEquals("", bucketPath.getBucketPath()); - } + assertEquals("", bucketPath.getBucketPath()); + } - @Test - public void theLastMustBeMetric() { - BucketPath bucketPath = new BucketPath(); + @Test + public void theLastMustBeMetric() { + BucketPath bucketPath = new BucketPath(); - exceptionRule.expect(AssertionError.class); - exceptionRule.expectMessage("The last path in the bucket path must be Metric"); - bucketPath.add(agg1); - } + exceptionRule.expect(AssertionError.class); + exceptionRule.expectMessage("The last path 
in the bucket path must be Metric"); + bucketPath.add(agg1); + } - @Test - public void allTheOtherMustBeAgg() { - BucketPath bucketPath = new BucketPath(); + @Test + public void allTheOtherMustBeAgg() { + BucketPath bucketPath = new BucketPath(); - exceptionRule.expect(AssertionError.class); - exceptionRule.expectMessage("All the other path in the bucket path must be Agg"); - bucketPath.add(metric); - bucketPath.add(metric); - } + exceptionRule.expect(AssertionError.class); + exceptionRule.expectMessage("All the other path in the bucket path must be Agg"); + bucketPath.add(metric); + bucketPath.add(metric); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java index 9f6fcbcc6d..1260b551fb 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.hamcrest.MatcherAssert.assertThat; @@ -35,79 +34,78 @@ @RunWith(MockitoJUnitRunner.class) public class BindingTupleQueryPlannerExecuteTest { - @Mock - private Client client; - @Mock - private SearchResponse aggResponse; - @Mock - private ColumnTypeProvider columnTypeProvider; + @Mock private Client client; + @Mock private SearchResponse aggResponse; + @Mock private ColumnTypeProvider columnTypeProvider; - @Before - public void init() { - MockitoAnnotations.initMocks(this); + @Before + public void init() { + MockitoAnnotations.initMocks(this); - ActionFuture mockFuture = mock(ActionFuture.class); - when(client.execute(any(), any())).thenReturn(mockFuture); - when(mockFuture.actionGet()).thenAnswer(invocationOnMock -> aggResponse); - } + ActionFuture 
mockFuture = mock(ActionFuture.class); + when(client.execute(any(), any())).thenReturn(mockFuture); + when(mockFuture.actionGet()).thenAnswer(invocationOnMock -> aggResponse); + } - @Test - public void testAggregationShouldPass() { - assertThat(query("SELECT gender, MAX(age) as max, MIN(age) as min FROM accounts GROUP BY gender", - mockSearchAggregation()), - containsInAnyOrder( - BindingTuple.from(ImmutableMap.of("gender", "m", "max", 20d, "min", 10d)), - BindingTuple.from(ImmutableMap.of("gender", "f", "max", 40d, "min", 20d)))); - } + @Test + public void testAggregationShouldPass() { + assertThat( + query( + "SELECT gender, MAX(age) as max, MIN(age) as min FROM accounts GROUP BY gender", + mockSearchAggregation()), + containsInAnyOrder( + BindingTuple.from(ImmutableMap.of("gender", "m", "max", 20d, "min", 10d)), + BindingTuple.from(ImmutableMap.of("gender", "f", "max", 40d, "min", 20d)))); + } + protected List query(String sql, MockSearchAggregation mockAgg) { + doAnswer(mockAgg).when(aggResponse).getAggregations(); - protected List query(String sql, MockSearchAggregation mockAgg) { - doAnswer(mockAgg).when(aggResponse).getAggregations(); + BindingTupleQueryPlanner queryPlanner = + new BindingTupleQueryPlanner(client, SqlParserUtils.parse(sql), columnTypeProvider); + return queryPlanner.execute(); + } - BindingTupleQueryPlanner queryPlanner = - new BindingTupleQueryPlanner(client, SqlParserUtils.parse(sql), columnTypeProvider); - return queryPlanner.execute(); - } + private MockSearchAggregation mockSearchAggregation() { + return new MockSearchAggregation( + "{\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 507,\n" + + " \"min#min\": {\n" + + " \"value\": 10\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 20\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 493,\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " },\n" + + " \"max#max\": {\n" + + " 
\"value\": 40\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"); + } - private MockSearchAggregation mockSearchAggregation() { - return new MockSearchAggregation("{\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 507,\n" - + " \"min#min\": {\n" - + " \"value\": 10\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 20\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 493,\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"); - } - - protected static class MockSearchAggregation implements Answer { - private final Aggregations aggregation; + protected static class MockSearchAggregation implements Answer { + private final Aggregations aggregation; - public MockSearchAggregation(String agg) { - aggregation = AggregationUtils.fromJson(agg); - } + public MockSearchAggregation(String agg) { + aggregation = AggregationUtils.fromJson(agg); + } - @Override - public Aggregations answer(InvocationOnMock invocationOnMock) throws Throwable { - return aggregation; - } + @Override + public Aggregations answer(InvocationOnMock invocationOnMock) throws Throwable { + return aggregation; } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java index 57530692d4..11e14e9b48 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.query; import static org.hamcrest.Matchers.equalTo; @@ -42,228 +41,225 @@ public class DefaultQueryActionTest { - private DefaultQueryAction queryAction; + private 
DefaultQueryAction queryAction; - private Client mockClient; + private Client mockClient; - private Select mockSelect; + private Select mockSelect; - private SearchRequestBuilder mockRequestBuilder; + private SearchRequestBuilder mockRequestBuilder; - @Before - public void initDefaultQueryAction() { + @Before + public void initDefaultQueryAction() { - mockClient = mock(Client.class); - mockSelect = mock(Select.class); - mockRequestBuilder = mock(SearchRequestBuilder.class); + mockClient = mock(Client.class); + mockSelect = mock(Select.class); + mockRequestBuilder = mock(SearchRequestBuilder.class); - List fields = new LinkedList<>(); - fields.add(new Field("balance", "bbb")); + List fields = new LinkedList<>(); + fields.add(new Field("balance", "bbb")); - doReturn(fields).when(mockSelect).getFields(); - doReturn(null).when(mockRequestBuilder).setFetchSource(any(String[].class), any(String[].class)); - doReturn(null).when(mockRequestBuilder).addScriptField(anyString(), any(Script.class)); + doReturn(fields).when(mockSelect).getFields(); + doReturn(null) + .when(mockRequestBuilder) + .setFetchSource(any(String[].class), any(String[].class)); + doReturn(null).when(mockRequestBuilder).addScriptField(anyString(), any(Script.class)); - queryAction = new DefaultQueryAction(mockClient, mockSelect); - queryAction.initialize(mockRequestBuilder); - } + queryAction = new DefaultQueryAction(mockClient, mockSelect); + queryAction.initialize(mockRequestBuilder); + } - @After - public void cleanup() { - LocalClusterState.state(null); - } + @After + public void cleanup() { + LocalClusterState.state(null); + } - @Test - public void scriptFieldWithTwoParams() throws SqlParseException { + @Test + public void scriptFieldWithTwoParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - false, true, false)); + List fields = new LinkedList<>(); + fields.add(createScriptField("script1", "doc['balance'] * 2", 
false, true, false)); - queryAction.setFields(fields); + queryAction.setFields(fields); - final Optional> fieldNames = queryAction.getFieldNames(); - Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); - Assert.assertThat(fieldNames.get().size(), equalTo(1)); - Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); + final Optional> fieldNames = queryAction.getFieldNames(); + Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); + Assert.assertThat(fieldNames.get().size(), equalTo(1)); + Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); - Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); - } + Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); + } - @Test - public void scriptFieldWithThreeParams() throws SqlParseException { + @Test + public void scriptFieldWithThreeParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - true, true, false)); + List fields = new LinkedList<>(); + fields.add(createScriptField("script1", "doc['balance'] * 2", true, true, false)); - queryAction.setFields(fields); + queryAction.setFields(fields); - final Optional> fieldNames = queryAction.getFieldNames(); - Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); - Assert.assertThat(fieldNames.get().size(), equalTo(1)); - Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); + final Optional> fieldNames = queryAction.getFieldNames(); + Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); + Assert.assertThat(fieldNames.get().size(), equalTo(1)); + Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); - Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); - } + Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); + } - @Test(expected = 
SqlParseException.class) - public void scriptFieldWithLessThanTwoParams() throws SqlParseException { + @Test(expected = SqlParseException.class) + public void scriptFieldWithLessThanTwoParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - false, false, false)); + List fields = new LinkedList<>(); + fields.add(createScriptField("script1", "doc['balance'] * 2", false, false, false)); - queryAction.setFields(fields); - } + queryAction.setFields(fields); + } - @Test - public void scriptFieldWithMoreThanThreeParams() throws SqlParseException { + @Test + public void scriptFieldWithMoreThanThreeParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - false, true, true)); - - queryAction.setFields(fields); - } - - @Test - public void testIfScrollShouldBeOpenWithDifferentFormats() { - int settingFetchSize = 500; - TimeValue timeValue = new TimeValue(120000); - int limit = 2300; - mockLocalClusterStateAndInitializeMetrics(timeValue); - - doReturn(limit).when(mockSelect).getRowCount(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); - queryAction.setSqlRequest(mockSqlRequest); - - Format[] formats = new Format[] {Format.CSV, Format.RAW, Format.JSON, Format.TABLE}; - for (Format format : formats) { - queryAction.setFormat(format); - queryAction.checkAndSetScroll(); - } - - Mockito.verify(mockRequestBuilder, times(4)).setSize(limit); - Mockito.verify(mockRequestBuilder, never()).setScroll(any(TimeValue.class)); - - queryAction.setFormat(Format.JDBC); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); - - } + List fields = new LinkedList<>(); + 
fields.add(createScriptField("script1", "doc['balance'] * 2", false, true, true)); - @Test - public void testIfScrollShouldBeOpen() { - int settingFetchSize = 500; - TimeValue timeValue = new TimeValue(120000); - int limit = 2300; + queryAction.setFields(fields); + } - doReturn(limit).when(mockSelect).getRowCount(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); - queryAction.setSqlRequest(mockSqlRequest); - queryAction.setFormat(Format.JDBC); + @Test + public void testIfScrollShouldBeOpenWithDifferentFormats() { + int settingFetchSize = 500; + TimeValue timeValue = new TimeValue(120000); + int limit = 2300; + mockLocalClusterStateAndInitializeMetrics(timeValue); - mockLocalClusterStateAndInitializeMetrics(timeValue); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); + doReturn(limit).when(mockSelect).getRowCount(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); + SqlRequest mockSqlRequest = mock(SqlRequest.class); + doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); + queryAction.setSqlRequest(mockSqlRequest); + Format[] formats = new Format[] {Format.CSV, Format.RAW, Format.JSON, Format.TABLE}; + for (Format format : formats) { + queryAction.setFormat(format); + queryAction.checkAndSetScroll(); } - @Test - public void testIfScrollShouldBeOpenWithDifferentFetchSize() { - TimeValue timeValue = new TimeValue(120000); - int limit = 2300; - mockLocalClusterStateAndInitializeMetrics(timeValue); - - doReturn(limit).when(mockSelect).getRowCount(); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - queryAction.setSqlRequest(mockSqlRequest); - queryAction.setFormat(Format.JDBC); - - int[] fetchSizes = new int[] {0, -10}; - for (int fetch : fetchSizes) { - 
doReturn(fetch).when(mockSqlRequest).fetchSize(); - queryAction.checkAndSetScroll(); - } - Mockito.verify(mockRequestBuilder, times(2)).setSize(limit); - Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); - - int userFetchSize = 20; - doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(20); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); + Mockito.verify(mockRequestBuilder, times(4)).setSize(limit); + Mockito.verify(mockRequestBuilder, never()).setScroll(any(TimeValue.class)); + + queryAction.setFormat(Format.JDBC); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + } + + @Test + public void testIfScrollShouldBeOpen() { + int settingFetchSize = 500; + TimeValue timeValue = new TimeValue(120000); + int limit = 2300; + + doReturn(limit).when(mockSelect).getRowCount(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); + SqlRequest mockSqlRequest = mock(SqlRequest.class); + doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); + queryAction.setSqlRequest(mockSqlRequest); + queryAction.setFormat(Format.JDBC); + + mockLocalClusterStateAndInitializeMetrics(timeValue); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + } + + @Test + public void testIfScrollShouldBeOpenWithDifferentFetchSize() { + TimeValue timeValue = new TimeValue(120000); + int limit = 2300; + mockLocalClusterStateAndInitializeMetrics(timeValue); + + doReturn(limit).when(mockSelect).getRowCount(); + SqlRequest mockSqlRequest = mock(SqlRequest.class); + queryAction.setSqlRequest(mockSqlRequest); + queryAction.setFormat(Format.JDBC); + + int[] fetchSizes = new 
int[] {0, -10}; + for (int fetch : fetchSizes) { + doReturn(fetch).when(mockSqlRequest).fetchSize(); + queryAction.checkAndSetScroll(); } - - - @Test - public void testIfScrollShouldBeOpenWithDifferentValidFetchSizeAndLimit() { - TimeValue timeValue = new TimeValue(120000); - mockLocalClusterStateAndInitializeMetrics(timeValue); - - int limit = 2300; - doReturn(limit).when(mockSelect).getRowCount(); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - - /** fetchSize <= LIMIT - open scroll*/ - int userFetchSize = 1500; - doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); - queryAction.setSqlRequest(mockSqlRequest); - queryAction.setFormat(Format.JDBC); - - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(userFetchSize); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); - - /** fetchSize > LIMIT - no scroll */ - userFetchSize = 5000; - doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); - mockRequestBuilder = mock(SearchRequestBuilder.class); - queryAction.initialize(mockRequestBuilder); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(limit); - Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); + Mockito.verify(mockRequestBuilder, times(2)).setSize(limit); + Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); + + int userFetchSize = 20; + doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(20); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + } + + @Test + public void testIfScrollShouldBeOpenWithDifferentValidFetchSizeAndLimit() { + TimeValue timeValue = new TimeValue(120000); + mockLocalClusterStateAndInitializeMetrics(timeValue); + + int limit = 2300; + 
doReturn(limit).when(mockSelect).getRowCount(); + SqlRequest mockSqlRequest = mock(SqlRequest.class); + + /** fetchSize <= LIMIT - open scroll */ + int userFetchSize = 1500; + doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); + queryAction.setSqlRequest(mockSqlRequest); + queryAction.setFormat(Format.JDBC); + + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(userFetchSize); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + + /** fetchSize > LIMIT - no scroll */ + userFetchSize = 5000; + doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); + mockRequestBuilder = mock(SearchRequestBuilder.class); + queryAction.initialize(mockRequestBuilder); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(limit); + Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); + } + + private void mockLocalClusterStateAndInitializeMetrics(TimeValue time) { + LocalClusterState mockLocalClusterState = mock(LocalClusterState.class); + LocalClusterState.state(mockLocalClusterState); + doReturn(time).when(mockLocalClusterState).getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + doReturn(3600L) + .when(mockLocalClusterState) + .getSettingValue(Settings.Key.METRICS_ROLLING_WINDOW); + doReturn(2L).when(mockLocalClusterState).getSettingValue(Settings.Key.METRICS_ROLLING_INTERVAL); + + Metrics.getInstance().registerDefaultMetrics(); + } + + private Field createScriptField( + final String name, + final String script, + final boolean addScriptLanguage, + final boolean addScriptParam, + final boolean addRedundantParam) { + + final List params = new ArrayList<>(); + + params.add(new KVValue("alias", name)); + if (addScriptLanguage) { + params.add(new KVValue("painless")); } - - private void mockLocalClusterStateAndInitializeMetrics(TimeValue time) { - LocalClusterState mockLocalClusterState = mock(LocalClusterState.class); - 
LocalClusterState.state(mockLocalClusterState); - doReturn(time).when(mockLocalClusterState).getSettingValue( - Settings.Key.SQL_CURSOR_KEEP_ALIVE); - doReturn(3600L).when(mockLocalClusterState).getSettingValue( - Settings.Key.METRICS_ROLLING_WINDOW); - doReturn(2L).when(mockLocalClusterState).getSettingValue( - Settings.Key.METRICS_ROLLING_INTERVAL); - - Metrics.getInstance().registerDefaultMetrics(); - + if (addScriptParam) { + params.add(new KVValue(script)); } - - private Field createScriptField(final String name, final String script, final boolean addScriptLanguage, - final boolean addScriptParam, final boolean addRedundantParam) { - - final List params = new ArrayList<>(); - - params.add(new KVValue("alias", name)); - if (addScriptLanguage) { - params.add(new KVValue("painless")); - } - if (addScriptParam) { - params.add(new KVValue(script)); - } - if (addRedundantParam) { - params.add(new KVValue("Fail the test")); - } - - return new MethodField("script", params, null, null); + if (addRedundantParam) { + params.add(new KVValue("Fail the test")); } + + return new MethodField("script", params, null, null); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java index 0c16a3264a..168725ed11 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.inline; import static org.hamcrest.MatcherAssert.assertThat; @@ -29,101 +28,111 @@ public class AliasInliningTests { - private static final String TEST_MAPPING_FILE = "mappings/semantics.json"; - @Before - public void setUp() throws IOException { - URL url = Resources.getResource(TEST_MAPPING_FILE); - 
String mappings = Resources.toString(url, Charsets.UTF_8); - mockLocalClusterState(mappings); - } - - @Test - public void orderByAliasedFieldTest() throws SqlParseException { - String originalQuery = "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "ORDER BY date DESC"; - String originalDsl = parseAsSimpleQuery(originalQuery); - - String rewrittenQuery = - "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "ORDER BY utc_time DESC"; - - String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); - - assertThat(originalDsl, equalTo(rewrittenDsl)); - } - - @Test - public void orderByAliasedScriptedField() throws SqlParseException { - String originalDsl = parseAsSimpleQuery("SELECT date_format(birthday, 'dd-MM-YYYY') date " + - "FROM bank " + - "ORDER BY date"); - String rewrittenQuery = "SELECT date_format(birthday, 'dd-MM-YYYY') date " + - "FROM bank " + - "ORDER BY date_format(birthday, 'dd-MM-YYYY')"; - - String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); - assertThat(originalDsl, equalTo(rewrittenDsl)); - } - - @Test - public void groupByAliasedFieldTest() throws SqlParseException { - String originalQuery = "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date"; - - String originalDsl = parseAsAggregationQuery(originalQuery); - - String rewrittenQuery = "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY utc_time DESC"; - - String rewrittenDsl = parseAsAggregationQuery(rewrittenQuery); - - assertThat(originalDsl, equalTo(rewrittenDsl)); - } - - @Test - public void groupAndSortBySameExprAlias() throws SqlParseException { - String query = "SELECT date_format(timestamp, 'yyyy-MM') opensearch-table.timestamp_tg, COUNT(*) count, COUNT(DistanceKilometers) opensearch-table.DistanceKilometers_count\n" + - "FROM opensearch_dashboards_sample_data_flights\n" + - "GROUP BY date_format(timestamp, 'yyyy-MM')\n" + - "ORDER BY 
date_format(timestamp, 'yyyy-MM') DESC\n" + - "LIMIT 2500"; - String dsl = parseAsAggregationQuery(query); - - JSONObject parseQuery = new JSONObject(dsl); - - assertThat(parseQuery.query("/aggregations/opensearch-table.timestamp_tg/terms/script"), notNullValue()); - - } - - @Test - public void groupByAndSortAliased() throws SqlParseException { - String dsl = parseAsAggregationQuery( - "SELECT date_format(utc_time, 'dd-MM-YYYY') date " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date " + - "ORDER BY date DESC"); - - JSONObject parsedQuery = new JSONObject(dsl); - - JSONObject query = (JSONObject)parsedQuery.query("/aggregations/date/terms/script"); - - assertThat(query, notNullValue()); - } - - private String parseAsSimpleQuery(String originalQuery) throws SqlParseException { - SqlRequest sqlRequest = new SqlRequest(originalQuery, new JSONObject()); - DefaultQueryAction defaultQueryAction = new DefaultQueryAction(mock(Client.class), - new SqlParser().parseSelect(parse(originalQuery))); - defaultQueryAction.setSqlRequest(sqlRequest); - return defaultQueryAction.explain().explain(); - } - - private String parseAsAggregationQuery(String originalQuery) throws SqlParseException { - return new AggregationQueryAction(mock(Client.class), - new SqlParser().parseSelect(parse(originalQuery))).explain().explain(); - } + private static final String TEST_MAPPING_FILE = "mappings/semantics.json"; + + @Before + public void setUp() throws IOException { + URL url = Resources.getResource(TEST_MAPPING_FILE); + String mappings = Resources.toString(url, Charsets.UTF_8); + mockLocalClusterState(mappings); + } + + @Test + public void orderByAliasedFieldTest() throws SqlParseException { + String originalQuery = + "SELECT utc_time date " + + "FROM opensearch_dashboards_sample_data_logs " + + "ORDER BY date DESC"; + String originalDsl = parseAsSimpleQuery(originalQuery); + + String rewrittenQuery = + "SELECT utc_time date " + + "FROM 
opensearch_dashboards_sample_data_logs " + + "ORDER BY utc_time DESC"; + + String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); + + assertThat(originalDsl, equalTo(rewrittenDsl)); + } + + @Test + public void orderByAliasedScriptedField() throws SqlParseException { + String originalDsl = + parseAsSimpleQuery( + "SELECT date_format(birthday, 'dd-MM-YYYY') date " + "FROM bank " + "ORDER BY date"); + String rewrittenQuery = + "SELECT date_format(birthday, 'dd-MM-YYYY') date " + + "FROM bank " + + "ORDER BY date_format(birthday, 'dd-MM-YYYY')"; + + String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); + assertThat(originalDsl, equalTo(rewrittenDsl)); + } + + @Test + public void groupByAliasedFieldTest() throws SqlParseException { + String originalQuery = + "SELECT utc_time date " + "FROM opensearch_dashboards_sample_data_logs " + "GROUP BY date"; + + String originalDsl = parseAsAggregationQuery(originalQuery); + + String rewrittenQuery = + "SELECT utc_time date " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY utc_time DESC"; + + String rewrittenDsl = parseAsAggregationQuery(rewrittenQuery); + + assertThat(originalDsl, equalTo(rewrittenDsl)); + } + + @Test + public void groupAndSortBySameExprAlias() throws SqlParseException { + String query = + "SELECT date_format(timestamp, 'yyyy-MM') opensearch-table.timestamp_tg, COUNT(*) count," + + " COUNT(DistanceKilometers) opensearch-table.DistanceKilometers_count\n" + + "FROM opensearch_dashboards_sample_data_flights\n" + + "GROUP BY date_format(timestamp, 'yyyy-MM')\n" + + "ORDER BY date_format(timestamp, 'yyyy-MM') DESC\n" + + "LIMIT 2500"; + String dsl = parseAsAggregationQuery(query); + + JSONObject parseQuery = new JSONObject(dsl); + + assertThat( + parseQuery.query("/aggregations/opensearch-table.timestamp_tg/terms/script"), + notNullValue()); + } + + @Test + public void groupByAndSortAliased() throws SqlParseException { + String dsl = + parseAsAggregationQuery( + "SELECT date_format(utc_time, 
'dd-MM-YYYY') date " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY date " + + "ORDER BY date DESC"); + + JSONObject parsedQuery = new JSONObject(dsl); + + JSONObject query = (JSONObject) parsedQuery.query("/aggregations/date/terms/script"); + + assertThat(query, notNullValue()); + } + + private String parseAsSimpleQuery(String originalQuery) throws SqlParseException { + SqlRequest sqlRequest = new SqlRequest(originalQuery, new JSONObject()); + DefaultQueryAction defaultQueryAction = + new DefaultQueryAction( + mock(Client.class), new SqlParser().parseSelect(parse(originalQuery))); + defaultQueryAction.setSqlRequest(sqlRequest); + return defaultQueryAction.explain().explain(); + } + + private String parseAsAggregationQuery(String originalQuery) throws SqlParseException { + return new AggregationQueryAction( + mock(Client.class), new SqlParser().parseSelect(parse(originalQuery))) + .explain() + .explain(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java index b0c6b8a2d8..c7e7f22d5c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.hamcrest.MatcherAssert.assertThat; @@ -15,28 +14,29 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * To test the functionality of {@link StringUtils#unquoteSingleField} - * and {@link StringUtils#unquoteFullColumn(String, String)} + * To test the functionality of {@link StringUtils#unquoteSingleField} and {@link + * StringUtils#unquoteFullColumn(String, String)} */ public class BackticksUnquoterTest { - @Test - public void assertNotQuotedStringShouldKeepTheSame() { - 
assertThat(unquoteSingleField("identifier"), equalTo("identifier")); - assertThat(unquoteFullColumn("identifier"), equalTo("identifier")); - } - - @Test - public void assertStringWithOneBackTickShouldKeepTheSame() { - assertThat(unquoteSingleField("`identifier"), equalTo("`identifier")); - assertThat(unquoteFullColumn("`identifier"), equalTo("`identifier")); - } - - @Test - public void assertBackticksQuotedStringShouldBeUnquoted() { - assertThat("identifier", equalTo(unquoteSingleField("`identifier`"))); - - assertThat("identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.`identifier2`"))); - assertThat("identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.identifier2"))); - } + @Test + public void assertNotQuotedStringShouldKeepTheSame() { + assertThat(unquoteSingleField("identifier"), equalTo("identifier")); + assertThat(unquoteFullColumn("identifier"), equalTo("identifier")); + } + + @Test + public void assertStringWithOneBackTickShouldKeepTheSame() { + assertThat(unquoteSingleField("`identifier"), equalTo("`identifier")); + assertThat(unquoteFullColumn("`identifier"), equalTo("`identifier")); + } + + @Test + public void assertBackticksQuotedStringShouldBeUnquoted() { + assertThat("identifier", equalTo(unquoteSingleField("`identifier`"))); + + assertThat( + "identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.`identifier2`"))); + assertThat("identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.identifier2"))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java index 58fa8793ff..85da1d990f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import 
com.fasterxml.jackson.core.JsonFactory; @@ -41,42 +40,52 @@ import org.opensearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder; public class AggregationUtils { - private final static List entryList = - new ImmutableMap.Builder>().put( - MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)) - .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)) - .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)) - .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)) - .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)) - .put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)) - .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)) - .put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c)) - .put(PercentilesBucketPipelineAggregationBuilder.NAME, - (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)) - .put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) - .build() - .entrySet() - .stream() - .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), - entry.getValue())) - .collect(Collectors.toList()); - private final static NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(entryList); + private static final List entryList = + new ImmutableMap.Builder>() + .put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)) + .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)) + .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)) + .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)) + .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)) + .put(LongTerms.NAME, (p, c) -> 
ParsedLongTerms.fromXContent(p, (String) c)) + .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)) + .put( + ValueCountAggregationBuilder.NAME, + (p, c) -> ParsedValueCount.fromXContent(p, (String) c)) + .put( + PercentilesBucketPipelineAggregationBuilder.NAME, + (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)) + .put( + DateHistogramAggregationBuilder.NAME, + (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) + .build() + .entrySet() + .stream() + .map( + entry -> + new NamedXContentRegistry.Entry( + Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); + private static final NamedXContentRegistry namedXContentRegistry = + new NamedXContentRegistry(entryList); - /** - * Populate {@link Aggregations} from JSON string. - * @param json json string - * @return {@link Aggregations} - */ - public static Aggregations fromJson(String json) { - try { - XContentParser xContentParser = new JsonXContentParser( - namedXContentRegistry, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(json)); - xContentParser.nextToken(); - return Aggregations.fromXContent(xContentParser); - } catch (IOException e) { - throw new RuntimeException(e); - } + /** + * Populate {@link Aggregations} from JSON string. 
+ * + * @param json json string + * @return {@link Aggregations} + */ + public static Aggregations fromJson(String json) { + try { + XContentParser xContentParser = + new JsonXContentParser( + namedXContentRegistry, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(json)); + xContentParser.nextToken(); + return Aggregations.fromXContent(xContentParser); + } catch (IOException e) { + throw new RuntimeException(e); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java index d627cebb27..7578720624 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import static java.util.Collections.emptyList; @@ -58,206 +57,211 @@ public class CheckScriptContents { - private static SQLExpr queryToExpr(String query) { - return new ElasticSqlExprParser(query).expr(); - } + private static SQLExpr queryToExpr(String query) { + return new ElasticSqlExprParser(query).expr(); + } - public static ScriptField getScriptFieldFromQuery(String query) { - try { - Client mockClient = mock(Client.class); - stubMockClient(mockClient); - QueryAction queryAction = OpenSearchActionFactory.create(mockClient, query); - SqlElasticRequestBuilder requestBuilder = queryAction.explain(); + public static ScriptField getScriptFieldFromQuery(String query) { + try { + Client mockClient = mock(Client.class); + stubMockClient(mockClient); + QueryAction queryAction = OpenSearchActionFactory.create(mockClient, query); + SqlElasticRequestBuilder requestBuilder = queryAction.explain(); - SearchRequestBuilder request = (SearchRequestBuilder) requestBuilder.getBuilder(); - List scriptFields = request.request().source().scriptFields(); + SearchRequestBuilder 
request = (SearchRequestBuilder) requestBuilder.getBuilder(); + List scriptFields = request.request().source().scriptFields(); - assertTrue(scriptFields.size() == 1); + assertTrue(scriptFields.size() == 1); - return scriptFields.get(0); + return scriptFields.get(0); - } catch (SQLFeatureNotSupportedException | SqlParseException | SQLFeatureDisabledException e) { - throw new ParserException("Unable to parse query: " + query, e); - } + } catch (SQLFeatureNotSupportedException | SqlParseException | SQLFeatureDisabledException e) { + throw new ParserException("Unable to parse query: " + query, e); } + } - public static ScriptFilter getScriptFilterFromQuery(String query, SqlParser parser) { - try { - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Where where = select.getWhere(); - - assertTrue(where.getWheres().size() == 1); - assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); + public static ScriptFilter getScriptFilterFromQuery(String query, SqlParser parser) { + try { + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Where where = select.getWhere(); - return (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); + assertTrue(where.getWheres().size() == 1); + assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); - } catch (SqlParseException e) { - throw new ParserException("Unable to parse query: " + query); - } - } + return (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); - public static boolean scriptContainsString(ScriptField scriptField, String string) { - return scriptField.script().getIdOrCode().contains(string); + } catch (SqlParseException e) { + throw new ParserException("Unable to parse query: " + query); } + } - public static boolean scriptContainsString(ScriptFilter scriptFilter, String string) { - return scriptFilter.getScript().contains(string); - } + public static boolean 
scriptContainsString(ScriptField scriptField, String string) { + return scriptField.script().getIdOrCode().contains(string); + } - public static boolean scriptHasPattern(ScriptField scriptField, String regex) { - Pattern pattern = Pattern.compile(regex); - Matcher matcher = pattern.matcher(scriptField.script().getIdOrCode()); - return matcher.find(); - } + public static boolean scriptContainsString(ScriptFilter scriptFilter, String string) { + return scriptFilter.getScript().contains(string); + } - public static boolean scriptHasPattern(ScriptFilter scriptFilter, String regex) { - Pattern pattern = Pattern.compile(regex); - Matcher matcher = pattern.matcher(scriptFilter.getScript()); - return matcher.find(); - } + public static boolean scriptHasPattern(ScriptField scriptField, String regex) { + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(scriptField.script().getIdOrCode()); + return matcher.find(); + } - public static void stubMockClient(Client mockClient) { - String mappings = "{\n" + - " \"opensearch-sql_test_index_bank\": {\n" + - " \"mappings\": {\n" + - " \"account\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"birthdate\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"email\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " 
},\n" + - " \"male\": {\n" + - " \"type\": \"boolean\"\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"raw\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - // ==== All required by IndexMetaData.fromXContent() ==== - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 5,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\",\n" + - " \"aliases_version\": \"1\"\n" + - //======================================================= - " }\n" + - "}"; + public static boolean scriptHasPattern(ScriptFilter scriptFilter, String regex) { + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(scriptFilter.getScript()); + return matcher.find(); + } - AdminClient mockAdminClient = mock(AdminClient.class); - when(mockClient.admin()).thenReturn(mockAdminClient); + public static void stubMockClient(Client mockClient) { + String mappings = + "{\n" + + " \"opensearch-sql_test_index_bank\": {\n" + + " \"mappings\": {\n" + + " \"account\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"birthdate\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"email\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": 
\"text\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"male\": {\n" + + " \"type\": \"boolean\"\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"raw\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + + // ==== All required by IndexMetaData.fromXContent() ==== + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 5,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\",\n" + + " \"aliases_version\": \"1\"\n" + + + // ======================================================= + " }\n" + + "}"; - IndicesAdminClient mockIndexClient = mock(IndicesAdminClient.class); - when(mockAdminClient.indices()).thenReturn(mockIndexClient); + AdminClient mockAdminClient = mock(AdminClient.class); + when(mockClient.admin()).thenReturn(mockAdminClient); - ActionFuture mockActionResp = mock(ActionFuture.class); - when(mockIndexClient.getFieldMappings(any(GetFieldMappingsRequest.class))).thenReturn(mockActionResp); - mockLocalClusterState(mappings); - } + IndicesAdminClient mockIndexClient = mock(IndicesAdminClient.class); + when(mockAdminClient.indices()).thenReturn(mockIndexClient); - public static XContentParser createParser(String mappings) throws IOException { - return XContentType.JSON.xContent().createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - mappings - ); - } + ActionFuture mockActionResp = mock(ActionFuture.class); + when(mockIndexClient.getFieldMappings(any(GetFieldMappingsRequest.class))) + .thenReturn(mockActionResp); + mockLocalClusterState(mappings); + } - public static void mockLocalClusterState(String mappings) { - 
LocalClusterState.state().setClusterService(mockClusterService(mappings)); - LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); - LocalClusterState.state().setPluginSettings(mockPluginSettings()); - } + public static XContentParser createParser(String mappings) throws IOException { + return XContentType.JSON + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, mappings); + } - public static ClusterService mockClusterService(String mappings) { - ClusterService mockService = mock(ClusterService.class); - ClusterState mockState = mock(ClusterState.class); - Metadata mockMetaData = mock(Metadata.class); + public static void mockLocalClusterState(String mappings) { + LocalClusterState.state().setClusterService(mockClusterService(mappings)); + LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); + LocalClusterState.state().setPluginSettings(mockPluginSettings()); + } - when(mockService.state()).thenReturn(mockState); - when(mockState.metadata()).thenReturn(mockMetaData); - try { - when(mockMetaData.findMappings(any(), any())).thenReturn( - Map.of(TestsConstants.TEST_INDEX_BANK, IndexMetadata.fromXContent( - createParser(mappings)).mapping())); - } - catch (IOException e) { - throw new IllegalStateException(e); - } - return mockService; - } + public static ClusterService mockClusterService(String mappings) { + ClusterService mockService = mock(ClusterService.class); + ClusterState mockState = mock(ClusterState.class); + Metadata mockMetaData = mock(Metadata.class); - public static IndexNameExpressionResolver mockIndexNameExpressionResolver() { - IndexNameExpressionResolver mockResolver = mock(IndexNameExpressionResolver.class); - when(mockResolver.concreteIndexNames(any(), any(), anyBoolean(), anyString())).thenAnswer( - (Answer) invocation -> { - // Return index expression directly without resolving - Object indexExprs = invocation.getArguments()[3]; - if (indexExprs 
instanceof String) { - return new String[]{ (String) indexExprs }; - } - return (String[]) indexExprs; - } - ); - return mockResolver; + when(mockService.state()).thenReturn(mockState); + when(mockState.metadata()).thenReturn(mockMetaData); + try { + when(mockMetaData.findMappings(any(), any())) + .thenReturn( + Map.of( + TestsConstants.TEST_INDEX_BANK, + IndexMetadata.fromXContent(createParser(mappings)).mapping())); + } catch (IOException e) { + throw new IllegalStateException(e); } + return mockService; + } - public static OpenSearchSettings mockPluginSettings() { - OpenSearchSettings settings = mock(OpenSearchSettings.class); + public static IndexNameExpressionResolver mockIndexNameExpressionResolver() { + IndexNameExpressionResolver mockResolver = mock(IndexNameExpressionResolver.class); + when(mockResolver.concreteIndexNames(any(), any(), anyBoolean(), anyString())) + .thenAnswer( + (Answer) + invocation -> { + // Return index expression directly without resolving + Object indexExprs = invocation.getArguments()[3]; + if (indexExprs instanceof String) { + return new String[] {(String) indexExprs}; + } + return (String[]) indexExprs; + }); + return mockResolver; + } - // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard to mock. - // In this case, default value in Setting will be returned all the time. - doReturn(emptyList()).when(settings).getSettings(); - return settings; - } + public static OpenSearchSettings mockPluginSettings() { + OpenSearchSettings settings = mock(OpenSearchSettings.class); + // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard + // to mock. + // In this case, default value in Setting will be returned all the time. 
+ doReturn(emptyList()).when(settings).getSettings(); + return settings; + } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java index e7e453ca3f..706d49afda 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java @@ -15,7 +15,7 @@ import org.opensearch.sql.data.type.ExprType; /** - * The type of text value. See doc */ public class OpenSearchTextType extends OpenSearchDataType { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java index 3d3a6a5996..bfc06b94c0 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java @@ -491,7 +491,7 @@ public void constructNestedArraysOfStringsReturnsFirstIndex() { public void constructMultiNestedArraysOfStringsReturnsFirstIndex() { assertEquals( stringValue("z"), - tupleValue("{\"stringV\":" + "[\"z\",[\"s\"],[\"zz\", \"au\"]]}").get("stringV")); + tupleValue("{\"stringV\":[\"z\",[\"s\"],[\"zz\", \"au\"]]}").get("stringV")); } @Test From 752da2154d337c74ce60752fe8e47b97ea182bb3 Mon Sep 17 00:00:00 2001 From: Yury-Fridlyand Date: Mon, 21 Aug 2023 09:40:29 -0700 Subject: [PATCH 4/6] Add support for `date_nanos` and tests. (#337) (#1976) * Add support for `date_nanos` and tests. (#337) * Add support for `date_nanos` and tests. Signed-off-by: Yury-Fridlyand * Add more IT. Signed-off-by: Yury-Fridlyand --------- Signed-off-by: Yury-Fridlyand * Typo fix in IT. Signed-off-by: Yury-Fridlyand * Address PR feedback. 
Signed-off-by: Yury-Fridlyand * Spotless Signed-off-by: Yury-Fridlyand --------- Signed-off-by: Yury-Fridlyand --- .../org/opensearch/sql/ppl/DataTypeIT.java | 1 + .../opensearch/sql/ppl/SystemFunctionIT.java | 25 ++++-- .../opensearch/sql/sql/DateTimeFormatsIT.java | 90 +++++++++++++++++++ .../opensearch/sql/sql/SystemFunctionIT.java | 18 +++- integ-test/src/test/resources/datatypes.json | 2 +- .../src/test/resources/date_formats.json | 2 +- .../datatypes_index_mapping.json | 5 +- .../date_formats_index_mapping.json | 2 +- .../data/type/OpenSearchDataType.java | 3 + .../data/type/OpenSearchDataTypeTest.java | 1 + 10 files changed, 133 insertions(+), 16 deletions(-) diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java index 8b5a6d498e..fe5c2ff270 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java @@ -49,6 +49,7 @@ public void test_nonnumeric_data_types() throws IOException { schema("text_value", "string"), schema("binary_value", "binary"), schema("date_value", "timestamp"), + schema("date_nanos_value", "timestamp"), schema("ip_value", "ip"), schema("object_value", "struct"), schema("nested_value", "array"), diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java index d2cd140e99..1c23935f81 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java @@ -70,19 +70,28 @@ public void typeof_opensearch_types() throws IOException { response = executeQuery( String.format( - "source=%s | eval " - + "`text` = typeof(text_value), `date` = typeof(date_value)," - + "`boolean` = typeof(boolean_value), `object` = typeof(object_value)," - + "`keyword` = typeof(keyword_value), `ip` = typeof(ip_value)," 
- + "`binary` = typeof(binary_value), `geo_point` = typeof(geo_point_value)" + "source=%s | eval `text` = typeof(text_value), `date` = typeof(date_value)," + + " `date_nanos` = typeof(date_nanos_value),`boolean` = typeof(boolean_value)," + + " `object` = typeof(object_value),`keyword` = typeof(keyword_value), `ip` =" + + " typeof(ip_value),`binary` = typeof(binary_value), `geo_point` =" + + " typeof(geo_point_value)" // TODO activate this test once `ARRAY` type supported, see // ExpressionAnalyzer::isTypeNotSupported // + ", `nested` = typeof(nested_value)" - + " | fields `text`, `date`, `boolean`, `object`, `keyword`, `ip`, `binary`," - + " `geo_point`", + + " | fields `text`, `date`, `date_nanos`, `boolean`, `object`, `keyword`," + + " `ip`, `binary`, `geo_point`", TEST_INDEX_DATATYPE_NONNUMERIC)); verifyDataRows( response, - rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", "IP", "BINARY", "GEO_POINT")); + rows( + "TEXT", + "TIMESTAMP", + "TIMESTAMP", + "BOOLEAN", + "OBJECT", + "KEYWORD", + "IP", + "BINARY", + "GEO_POINT")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java index d6f2d2c7f4..13c2eecd56 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java @@ -5,6 +5,7 @@ package org.opensearch.sql.sql; +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATATYPE_NONNUMERIC; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE_FORMATS; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -29,6 +30,7 @@ public class DateTimeFormatsIT extends SQLIntegTestCase { public void init() throws Exception { super.init(); loadIndex(Index.DATE_FORMATS); + loadIndex(Index.DATA_TYPE_NONNUMERIC); } @Test @@ -139,6 +141,94 @@ public void 
testNumericFormats() { rows("1970-01-02 03:55:00", "1970-01-01 00:01:40.5")); } + @Test + @SneakyThrows + public void testDateNanosWithFormats() { + String query = + String.format("SELECT hour_minute_second_OR_t_time" + " FROM %s", TEST_INDEX_DATE_FORMATS); + JSONObject result = executeQuery(query); + verifySchema(result, schema("hour_minute_second_OR_t_time", null, "time")); + verifyDataRows(result, rows("09:07:42"), rows("07:07:42.123456789")); + } + + @Test + @SneakyThrows + public void testDateNanosWithFunctions() { + // in memory funcs + String query = + String.format( + "SELECT" + + " hour_minute_second_OR_t_time > TIME '08:07:00'," + + " hour_minute_second_OR_t_time < TIME '08:07:00'," + + " hour_minute_second_OR_t_time = t_time_no_millis," + + " hour_minute_second_OR_t_time <> strict_t_time," + + " hour_minute_second_OR_t_time >= t_time" + + " FROM %s", + TEST_INDEX_DATE_FORMATS); + JSONObject result = executeQuery(query); + verifySchema( + result, + schema("hour_minute_second_OR_t_time > TIME '08:07:00'", null, "boolean"), + schema("hour_minute_second_OR_t_time < TIME '08:07:00'", null, "boolean"), + schema("hour_minute_second_OR_t_time = t_time_no_millis", null, "boolean"), + schema("hour_minute_second_OR_t_time <> strict_t_time", null, "boolean"), + schema("hour_minute_second_OR_t_time >= t_time", null, "boolean")); + verifyDataRows( + result, rows(true, false, true, false, true), rows(false, true, false, true, false)); + // push down + query = + String.format( + "SELECT hour_minute_second_OR_t_time" + + " FROM %s WHERE hour_minute_second_OR_t_time > TIME '08:07:00'", + TEST_INDEX_DATE_FORMATS); + result = executeQuery(query); + verifySchema(result, schema("hour_minute_second_OR_t_time", null, "time")); + verifyDataRows(result, rows("09:07:42")); + query = + String.format( + "SELECT hour_minute_second_OR_t_time" + + " FROM %s WHERE hour_minute_second_OR_t_time < TIME '08:07:00'", + TEST_INDEX_DATE_FORMATS); + result = executeQuery(query); + 
verifySchema(result, schema("hour_minute_second_OR_t_time", null, "time")); + verifyDataRows(result, rows("07:07:42.123456789")); + } + + @Test + @SneakyThrows + public void testDateNanosOrderBy() { + String query = + String.format( + "SELECT hour_minute_second_OR_t_time" + + " FROM %s ORDER BY hour_minute_second_OR_t_time ASC", + TEST_INDEX_DATE_FORMATS); + JSONObject result = executeQuery(query); + verifySchema(result, schema("hour_minute_second_OR_t_time", null, "time")); + verifyDataRows(result, rows("07:07:42.123456789"), rows("09:07:42")); + } + + @Test + @SneakyThrows + public void testDateNanosGroupBy() { + String query = + String.format( + "SELECT count(*)" + " FROM %s GROUP BY hour_minute_second_OR_t_time", + TEST_INDEX_DATE_FORMATS); + JSONObject result = executeQuery(query); + verifySchema(result, schema("count(*)", null, "integer")); + verifyDataRows(result, rows(1), rows(1)); + } + + @Test + @SneakyThrows + public void testDateNanosWithNanos() { + String query = + String.format("SELECT date_nanos_value" + " FROM %s", TEST_INDEX_DATATYPE_NONNUMERIC); + JSONObject result = executeQuery(query); + verifySchema(result, schema("date_nanos_value", null, "timestamp")); + verifyDataRows(result, rows("2019-03-24 01:34:46.123456789")); + } + protected JSONObject executeQuery(String query) throws IOException { Request request = new Request("POST", QUERY_API_ENDPOINT); request.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query)); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java index 4b39e2925c..d2798728a1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java @@ -55,9 +55,10 @@ public void typeof_opensearch_types() { response = executeJdbcRequest( String.format( - "SELECT typeof(text_value),typeof(date_value), 
typeof(boolean_value)," - + " typeof(object_value), typeof(keyword_value),typeof(ip_value)," - + " typeof(binary_value), typeof(geo_point_value)" + "SELECT typeof(text_value),typeof(date_value), typeof(date_nanos_value)," + + " typeof(boolean_value), typeof(object_value)," + + " typeof(keyword_value),typeof(ip_value), typeof(binary_value)," + + " typeof(geo_point_value)" // TODO activate this test once `ARRAY` type supported, see // ExpressionAnalyzer::isTypeNotSupported // + ", typeof(nested_value)" @@ -65,6 +66,15 @@ public void typeof_opensearch_types() { TEST_INDEX_DATATYPE_NONNUMERIC)); verifyDataRows( response, - rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", "IP", "BINARY", "GEO_POINT")); + rows( + "TEXT", + "TIMESTAMP", + "TIMESTAMP", + "BOOLEAN", + "OBJECT", + "KEYWORD", + "IP", + "BINARY", + "GEO_POINT")); } } diff --git a/integ-test/src/test/resources/datatypes.json b/integ-test/src/test/resources/datatypes.json index ea3290ee64..70ddd28763 100644 --- a/integ-test/src/test/resources/datatypes.json +++ b/integ-test/src/test/resources/datatypes.json @@ -1,2 +1,2 @@ {"index":{"_id":"1"}} -{"boolean_value": true, "keyword_value": "keyword", "text_value": "text", "binary_value": "U29tZSBiaW5hcnkgYmxvYg==", "date_value": "2020-10-13 13:00:00", "ip_value": "127.0.0.1", "object_value": {"first": "Dale", "last": "Dale"}, "nested_value": [{"first" : "John", "last" : "Smith"}, {"first" : "Alice", "last" : "White"}], "geo_point_value": { "lat": 40.71, "lon": 74.00 }} +{"boolean_value": true, "keyword_value": "keyword", "text_value": "text", "binary_value": "U29tZSBiaW5hcnkgYmxvYg==", "date_value": "2020-10-13 13:00:00", "date_nanos_value": "2019-03-23T21:34:46.123456789-04:00", "ip_value": "127.0.0.1", "object_value": {"first": "Dale", "last": "Dale"}, "nested_value": [{"first" : "John", "last" : "Smith"}, {"first" : "Alice", "last" : "White"}], "geo_point_value": { "lat": 40.71, "lon": 74.00 }} diff --git a/integ-test/src/test/resources/date_formats.json 
b/integ-test/src/test/resources/date_formats.json index 13d46a0e8c..2ff0c867a3 100644 --- a/integ-test/src/test/resources/date_formats.json +++ b/integ-test/src/test/resources/date_formats.json @@ -1,4 +1,4 @@ {"index": {}} {"epoch_millis": "450608862000.123456", "epoch_second": "450608862.000123456", "date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time_nanos": "1984-04-12T09:07:42.000123456Z", "basic_date": "19840412", "basic_date_time": "19840412T090742.000Z", "basic_date_time_no_millis": "19840412T090742Z", "basic_ordinal_date": "1984103", "basic_ordinal_date_time": "1984103T090742.000Z", "basic_ordinal_date_time_no_millis": "1984103T090742Z", "basic_time": "090742.000Z", "basic_time_no_millis": "090742Z", "basic_t_time": "T090742.000Z", "basic_t_time_no_millis": "T090742Z", "basic_week_date": "1984W154", "strict_basic_week_date": "1984W154", "basic_week_date_time": "1984W154T090742.000Z", "strict_basic_week_date_time": "1984W154T090742.000Z", "basic_week_date_time_no_millis": "1984W154T090742Z", "strict_basic_week_date_time_no_millis": "1984W154T090742Z", "date": "1984-04-12", "strict_date": "1984-04-12", "date_hour": "1984-04-12T09", "strict_date_hour": "1984-04-12T09", "date_hour_minute": "1984-04-12T09:07", "strict_date_hour_minute": "1984-04-12T09:07", "date_hour_minute_second": "1984-04-12T09:07:42", "strict_date_hour_minute_second": "1984-04-12T09:07:42", "date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "date_time": "1984-04-12T09:07:42.000Z", "strict_date_time": "1984-04-12T09:07:42.000123456Z", "date_time_no_millis": "1984-04-12T09:07:42Z", "strict_date_time_no_millis": "1984-04-12T09:07:42Z", "hour": "09", "strict_hour": "09", "hour_minute": "09:07", 
"strict_hour_minute": "09:07", "hour_minute_second": "09:07:42", "strict_hour_minute_second": "09:07:42", "hour_minute_second_fraction": "09:07:42.000", "strict_hour_minute_second_fraction": "09:07:42.000", "hour_minute_second_millis": "09:07:42.000", "strict_hour_minute_second_millis": "09:07:42.000", "ordinal_date": "1984-103", "strict_ordinal_date": "1984-103", "ordinal_date_time": "1984-103T09:07:42.000123456Z", "strict_ordinal_date_time": "1984-103T09:07:42.000123456Z", "ordinal_date_time_no_millis": "1984-103T09:07:42Z", "strict_ordinal_date_time_no_millis": "1984-103T09:07:42Z", "time": "09:07:42.000Z", "strict_time": "09:07:42.000Z", "time_no_millis": "09:07:42Z", "strict_time_no_millis": "09:07:42Z", "t_time": "T09:07:42.000Z", "strict_t_time": "T09:07:42.000Z", "t_time_no_millis": "T09:07:42Z", "strict_t_time_no_millis": "T09:07:42Z", "week_date": "1984-W15-4", "strict_week_date": "1984-W15-4", "week_date_time": "1984-W15-4T09:07:42.000Z", "strict_week_date_time": "1984-W15-4T09:07:42.000Z", "week_date_time_no_millis": "1984-W15-4T09:07:42Z", "strict_week_date_time_no_millis": "1984-W15-4T09:07:42Z", "weekyear_week_day": "1984-W15-4", "strict_weekyear_week_day": "1984-W15-4", "year_month_day": "1984-04-12", "strict_year_month_day": "1984-04-12", "yyyy-MM-dd": "1984-04-12", "custom_time": "09:07:42 AM", "yyyy-MM-dd_OR_epoch_millis": "1984-04-12", "hour_minute_second_OR_t_time": "09:07:42", "custom_timestamp": "1984-04-12 09:07:42 ---- AM", "custom_date_or_date": "1984-04-12", "custom_date_or_custom_time": "1961-04-12", "custom_time_parser_check": "85476321", "incomplete_1" : 1984, "incomplete_2": null, "incomplete_custom_date": 1999, "incomplete_custom_time" : 10, "incorrect" : null, "epoch_sec" : 42, "epoch_milli" : 42, "custom_no_delimiter_date" : "19841020", "custom_no_delimiter_time" : "102030", "custom_no_delimiter_ts" : "19841020153548"} {"index": {}} -{"epoch_millis": "450608862000.123456", "epoch_second": "450608862.000123456", 
"date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time_nanos": "1984-04-12T09:07:42.000123456Z", "basic_date": "19840412", "basic_date_time": "19840412T090742.000Z", "basic_date_time_no_millis": "19840412T090742Z", "basic_ordinal_date": "1984103", "basic_ordinal_date_time": "1984103T090742.000Z", "basic_ordinal_date_time_no_millis": "1984103T090742Z", "basic_time": "090742.000Z", "basic_time_no_millis": "090742Z", "basic_t_time": "T090742.000Z", "basic_t_time_no_millis": "T090742Z", "basic_week_date": "1984W154", "strict_basic_week_date": "1984W154", "basic_week_date_time": "1984W154T090742.000Z", "strict_basic_week_date_time": "1984W154T090742.000Z", "basic_week_date_time_no_millis": "1984W154T090742Z", "strict_basic_week_date_time_no_millis": "1984W154T090742Z", "date": "1984-04-12", "strict_date": "1984-04-12", "date_hour": "1984-04-12T09", "strict_date_hour": "1984-04-12T09", "date_hour_minute": "1984-04-12T09:07", "strict_date_hour_minute": "1984-04-12T09:07", "date_hour_minute_second": "1984-04-12T09:07:42", "strict_date_hour_minute_second": "1984-04-12T09:07:42", "date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "date_time": "1984-04-12T09:07:42.000Z", "strict_date_time": "1984-04-12T09:07:42.000123456Z", "date_time_no_millis": "1984-04-12T09:07:42Z", "strict_date_time_no_millis": "1984-04-12T09:07:42Z", "hour": "09", "strict_hour": "09", "hour_minute": "09:07", "strict_hour_minute": "09:07", "hour_minute_second": "09:07:42", "strict_hour_minute_second": "09:07:42", "hour_minute_second_fraction": "09:07:42.000", "strict_hour_minute_second_fraction": "09:07:42.000", "hour_minute_second_millis": "09:07:42.000", "strict_hour_minute_second_millis": "09:07:42.000", 
"ordinal_date": "1984-103", "strict_ordinal_date": "1984-103", "ordinal_date_time": "1984-103T09:07:42.000123456Z", "strict_ordinal_date_time": "1984-103T09:07:42.000123456Z", "ordinal_date_time_no_millis": "1984-103T09:07:42Z", "strict_ordinal_date_time_no_millis": "1984-103T09:07:42Z", "time": "09:07:42.000Z", "strict_time": "09:07:42.000Z", "time_no_millis": "09:07:42Z", "strict_time_no_millis": "09:07:42Z", "t_time": "T09:07:42.000Z", "strict_t_time": "T09:07:42.000Z", "t_time_no_millis": "T09:07:42Z", "strict_t_time_no_millis": "T09:07:42Z", "week_date": "1984-W15-4", "strict_week_date": "1984-W15-4", "week_date_time": "1984-W15-4T09:07:42.000Z", "strict_week_date_time": "1984-W15-4T09:07:42.000Z", "week_date_time_no_millis": "1984-W15-4T09:07:42Z", "strict_week_date_time_no_millis": "1984-W15-4T09:07:42Z", "weekyear_week_day": "1984-W15-4", "strict_weekyear_week_day": "1984-W15-4", "year_month_day": "1984-04-12", "strict_year_month_day": "1984-04-12", "yyyy-MM-dd": "1984-04-12", "custom_time": "09:07:42 PM", "yyyy-MM-dd_OR_epoch_millis": "450608862000.123456", "hour_minute_second_OR_t_time": "T09:07:42.000Z", "custom_timestamp": "1984-04-12 10:07:42 ---- PM", "custom_date_or_date": "1984-04-12", "custom_date_or_custom_time": "09:07:00", "custom_time_parser_check": "::: 9-32476542", "incomplete_1" : 2012, "incomplete_2": null, "incomplete_custom_date": 3021, "incomplete_custom_time" : 20, "incorrect" : null, "epoch_sec" : 100500, "epoch_milli" : 100500, "custom_no_delimiter_date" : "19610412", "custom_no_delimiter_time" : "090700", "custom_no_delimiter_ts" : "19610412090700"} +{"epoch_millis": "450608862000.123456", "epoch_second": "450608862.000123456", "date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time_nanos": "1984-04-12T09:07:42.000123456Z", "basic_date": "19840412", "basic_date_time": "19840412T090742.000Z", "basic_date_time_no_millis": "19840412T090742Z", 
"basic_ordinal_date": "1984103", "basic_ordinal_date_time": "1984103T090742.000Z", "basic_ordinal_date_time_no_millis": "1984103T090742Z", "basic_time": "090742.000Z", "basic_time_no_millis": "090742Z", "basic_t_time": "T090742.000Z", "basic_t_time_no_millis": "T090742Z", "basic_week_date": "1984W154", "strict_basic_week_date": "1984W154", "basic_week_date_time": "1984W154T090742.000Z", "strict_basic_week_date_time": "1984W154T090742.000Z", "basic_week_date_time_no_millis": "1984W154T090742Z", "strict_basic_week_date_time_no_millis": "1984W154T090742Z", "date": "1984-04-12", "strict_date": "1984-04-12", "date_hour": "1984-04-12T09", "strict_date_hour": "1984-04-12T09", "date_hour_minute": "1984-04-12T09:07", "strict_date_hour_minute": "1984-04-12T09:07", "date_hour_minute_second": "1984-04-12T09:07:42", "strict_date_hour_minute_second": "1984-04-12T09:07:42", "date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "date_time": "1984-04-12T09:07:42.000Z", "strict_date_time": "1984-04-12T09:07:42.000123456Z", "date_time_no_millis": "1984-04-12T09:07:42Z", "strict_date_time_no_millis": "1984-04-12T09:07:42Z", "hour": "09", "strict_hour": "09", "hour_minute": "09:07", "strict_hour_minute": "09:07", "hour_minute_second": "09:07:42", "strict_hour_minute_second": "09:07:42", "hour_minute_second_fraction": "09:07:42.000", "strict_hour_minute_second_fraction": "09:07:42.000", "hour_minute_second_millis": "09:07:42.000", "strict_hour_minute_second_millis": "09:07:42.000", "ordinal_date": "1984-103", "strict_ordinal_date": "1984-103", "ordinal_date_time": "1984-103T09:07:42.000123456Z", "strict_ordinal_date_time": "1984-103T09:07:42.000123456Z", "ordinal_date_time_no_millis": "1984-103T09:07:42Z", "strict_ordinal_date_time_no_millis": "1984-103T09:07:42Z", "time": 
"09:07:42.000Z", "strict_time": "09:07:42.000Z", "time_no_millis": "09:07:42Z", "strict_time_no_millis": "09:07:42Z", "t_time": "T09:07:42.000Z", "strict_t_time": "T09:07:42.000Z", "t_time_no_millis": "T09:07:42Z", "strict_t_time_no_millis": "T09:07:42Z", "week_date": "1984-W15-4", "strict_week_date": "1984-W15-4", "week_date_time": "1984-W15-4T09:07:42.000Z", "strict_week_date_time": "1984-W15-4T09:07:42.000Z", "week_date_time_no_millis": "1984-W15-4T09:07:42Z", "strict_week_date_time_no_millis": "1984-W15-4T09:07:42Z", "weekyear_week_day": "1984-W15-4", "strict_weekyear_week_day": "1984-W15-4", "year_month_day": "1984-04-12", "strict_year_month_day": "1984-04-12", "yyyy-MM-dd": "1984-04-12", "custom_time": "09:07:42 PM", "yyyy-MM-dd_OR_epoch_millis": "450608862000.123456", "hour_minute_second_OR_t_time": "T07:07:42.123456789Z", "custom_timestamp": "1984-04-12 10:07:42 ---- PM", "custom_date_or_date": "1984-04-12", "custom_date_or_custom_time": "09:07:00", "custom_time_parser_check": "::: 9-32476542", "incomplete_1" : 2012, "incomplete_2": null, "incomplete_custom_date": 3021, "incomplete_custom_time" : 20, "incorrect" : null, "epoch_sec" : 100500, "epoch_milli" : 100500, "custom_no_delimiter_date" : "19610412", "custom_no_delimiter_time" : "090700", "custom_no_delimiter_ts" : "19610412090700"} diff --git a/integ-test/src/test/resources/indexDefinitions/datatypes_index_mapping.json b/integ-test/src/test/resources/indexDefinitions/datatypes_index_mapping.json index 8c1759b369..5908114201 100644 --- a/integ-test/src/test/resources/indexDefinitions/datatypes_index_mapping.json +++ b/integ-test/src/test/resources/indexDefinitions/datatypes_index_mapping.json @@ -13,10 +13,13 @@ "binary_value": { "type": "binary" }, - "date_value": { + "date_value": { "type" : "date", "format": "yyyy-MM-dd HH:mm:ss" }, + "date_nanos_value": { + "type" : "date_nanos" + }, "ip_value": { "type": "ip" }, diff --git 
a/integ-test/src/test/resources/indexDefinitions/date_formats_index_mapping.json b/integ-test/src/test/resources/indexDefinitions/date_formats_index_mapping.json index 65811f8d9e..0b6daaacb4 100644 --- a/integ-test/src/test/resources/indexDefinitions/date_formats_index_mapping.json +++ b/integ-test/src/test/resources/indexDefinitions/date_formats_index_mapping.json @@ -298,7 +298,7 @@ "format": "yyyy-MM-dd||epoch_millis" }, "hour_minute_second_OR_t_time" : { - "type" : "date", + "type" : "date_nanos", "format": "hour_minute_second||t_time" }, "custom_timestamp" : { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java index d276374539..ddbba61260 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java @@ -30,6 +30,7 @@ public enum MappingType { GeoPoint("geo_point", ExprCoreType.UNKNOWN), Binary("binary", ExprCoreType.UNKNOWN), Date("date", ExprCoreType.TIMESTAMP), + DateNanos("date_nanos", ExprCoreType.TIMESTAMP), Object("object", ExprCoreType.STRUCT), Nested("nested", ExprCoreType.ARRAY), Byte("byte", ExprCoreType.BYTE), @@ -130,6 +131,7 @@ public static Map parseMapping(Map i * @param mappingType A mapping type. * @return An instance or inheritor of `OpenSearchDataType`. 
*/ + @SuppressWarnings("unchecked") public static OpenSearchDataType of(MappingType mappingType, Map innerMap) { OpenSearchDataType res = instances.getOrDefault(mappingType.toString(), new OpenSearchDataType(mappingType)); @@ -157,6 +159,7 @@ public static OpenSearchDataType of(MappingType mappingType, Map case Ip: return OpenSearchIpType.of(); case Date: + case DateNanos: // Default date formatter is used when "" is passed as the second parameter String format = (String) innerMap.getOrDefault("format", ""); return OpenSearchDateType.of(format); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java index b0288dc9a7..82e6222dc4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java @@ -105,6 +105,7 @@ private static Stream getTestDataWithType() { Arguments.of(MappingType.Double, "double", DOUBLE), Arguments.of(MappingType.Boolean, "boolean", BOOLEAN), Arguments.of(MappingType.Date, "date", TIMESTAMP), + Arguments.of(MappingType.DateNanos, "date", TIMESTAMP), Arguments.of(MappingType.Object, "object", STRUCT), Arguments.of(MappingType.Nested, "nested", ARRAY), Arguments.of(MappingType.GeoPoint, "geo_point", OpenSearchGeoPointType.of()), From 25beda059d652eea62054445e775fae498d33d29 Mon Sep 17 00:00:00 2001 From: Matthew Wells Date: Mon, 21 Aug 2023 09:57:45 -0700 Subject: [PATCH 5/6] Remove Datetime data type (#1980) * Remove Datetime data type (#336) * removed datetime type, updated tests and documentation Signed-off-by: Matthew Wells * removed duplicate test code, replaced calls of ZoneOffset.UTC with a constant Signed-off-by: Matthew Wells * readded test and edited it to return timestamp, fixed minor checkstyle difference Signed-off-by: Matthew Wells * converted all utc 
timezone/zone id to be ZoneOffset.UTC Signed-off-by: Matthew Wells * Spotless Apply Signed-off-by: Matthew Wells * Added tests back in and updated to work with timestamp Signed-off-by: Matthew Wells * Spotless Apply Signed-off-by: Matthew Wells * removed duplicate tests, renamed test Signed-off-by: Matthew Wells --------- Signed-off-by: Matthew Wells --- .../sql/data/model/ExprDateValue.java | 10 +- .../sql/data/model/ExprDatetimeValue.java | 99 ---- .../sql/data/model/ExprStringValue.java | 22 +- .../sql/data/model/ExprTimeValue.java | 9 +- .../sql/data/model/ExprTimestampValue.java | 26 +- .../opensearch/sql/data/model/ExprValue.java | 7 - .../sql/data/model/ExprValueUtils.java | 11 +- .../sql/data/type/ExprCoreType.java | 3 +- .../org/opensearch/sql/expression/DSL.java | 4 - .../aggregation/AggregatorFunction.java | 10 - .../expression/aggregation/AvgAggregator.java | 25 - .../datetime/DateTimeFormatterUtil.java | 9 +- .../expression/datetime/DateTimeFunction.java | 501 ++++++------------ .../operator/convert/TypeCastOperator.java | 31 +- .../planner/physical/collector/Rounding.java | 34 +- .../opensearch/sql/utils/DateTimeUtils.java | 22 +- .../opensearch/sql/analysis/AnalyzerTest.java | 2 +- .../sql/data/model/DateTimeValueTest.java | 101 +--- .../sql/data/model/ExprValueCompareTest.java | 59 +-- .../sql/data/model/ExprValueUtilsTest.java | 17 +- .../sql/data/type/ExprTypeTest.java | 2 - .../aggregation/AvgAggregatorTest.java | 18 +- .../aggregation/CountAggregatorTest.java | 7 - .../aggregation/MaxAggregatorTest.java | 7 - .../aggregation/MinAggregatorTest.java | 7 - .../datetime/AddTimeAndSubTimeTest.java | 42 +- .../expression/datetime/ConvertTZTest.java | 78 +-- .../datetime/DateAddAndAddDateTest.java | 102 ++-- .../sql/expression/datetime/DateDiffTest.java | 2 +- .../datetime/DateSubAndSubDateTest.java | 80 +-- .../datetime/DateTimeFunctionTest.java | 65 +-- .../sql/expression/datetime/DateTimeTest.java | 32 +- 
.../expression/datetime/DateTimeTestBase.java | 10 +- .../sql/expression/datetime/ExtractTest.java | 6 +- .../expression/datetime/FromUnixTimeTest.java | 8 +- .../datetime/NowLikeFunctionTest.java | 22 +- .../expression/datetime/StrToDateTest.java | 37 +- .../expression/datetime/TimeStampAddTest.java | 17 +- .../datetime/TimeStampDiffTest.java | 33 +- .../expression/datetime/TimestampTest.java | 20 +- .../expression/datetime/ToSecondsTest.java | 2 - .../datetime/UnixTimeStampTest.java | 3 +- .../datetime/UnixTwoWayConversionTest.java | 17 +- .../sql/expression/datetime/YearweekTest.java | 6 +- .../BuiltinFunctionRepositoryTest.java | 4 +- .../function/WideningTypeRuleTest.java | 9 +- .../convert/TypeCastOperatorTest.java | 29 +- .../BinaryPredicateOperatorTest.java | 4 +- .../system/SystemFunctionsTest.java | 3 - .../physical/AggregationOperatorTest.java | 32 +- .../physical/PhysicalPlanTestBase.java | 21 +- .../opensearch/sql/utils/ComparisonUtil.java | 8 +- docs/dev/img/type-hierarchy-tree-final.png | Bin 0 -> 30902 bytes docs/user/dql/expressions.rst | 6 +- docs/user/dql/functions.rst | 194 ++++--- docs/user/general/datatypes.rst | 127 ++--- docs/user/ppl/functions/datetime.rst | 197 ++++--- docs/user/ppl/functions/system.rst | 2 +- docs/user/ppl/general/datatypes.rst | 37 +- .../sql/ppl/ConvertTZFunctionIT.java | 28 +- .../sql/ppl/DateTimeComparisonIT.java | 356 ------------- .../sql/ppl/DateTimeFunctionIT.java | 173 +++--- .../sql/ppl/DateTimeImplementationIT.java | 28 +- .../opensearch/sql/ppl/SystemFunctionIT.java | 7 +- .../org/opensearch/sql/sql/AggregationIT.java | 39 +- .../sql/sql/ConvertTZFunctionIT.java | 38 +- .../sql/sql/DateTimeComparisonIT.java | 356 ------------- .../sql/sql/DateTimeFunctionIT.java | 169 +++--- .../sql/sql/DateTimeImplementationIT.java | 30 +- .../opensearch/sql/sql/SystemFunctionIT.java | 5 +- .../data/type/OpenSearchDateType.java | 1 - .../value/OpenSearchExprValueFactory.java | 16 +- .../ExpressionAggregationScript.java | 1 - 
.../dsl/BucketAggregationBuilder.java | 3 +- .../script/filter/lucene/LuceneQuery.java | 10 - .../data/type/OpenSearchDateTypeTest.java | 25 +- .../value/OpenSearchExprValueFactoryTest.java | 25 +- .../AggregationQueryBuilderTest.java | 15 - .../ExpressionAggregationScriptTest.java | 9 - .../dsl/BucketAggregationBuilderTest.java | 2 +- .../filter/ExpressionFilterScriptTest.java | 10 - .../script/filter/FilterQueryBuilderTest.java | 30 +- 82 files changed, 1063 insertions(+), 2611 deletions(-) delete mode 100644 core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java create mode 100644 docs/dev/img/type-hierarchy-tree-final.png diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java index 3f3f67a4fa..c36cd3ea6d 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java @@ -6,13 +6,12 @@ package org.opensearch.sql.data.model; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import com.google.common.base.Objects; import java.time.Instant; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeParseException; @@ -57,14 +56,9 @@ public LocalTime timeValue() { return LocalTime.of(0, 0, 0); } - @Override - public LocalDateTime datetimeValue() { - return LocalDateTime.of(date, timeValue()); - } - @Override public Instant timestampValue() { - return ZonedDateTime.of(date, timeValue(), UTC_ZONE_ID).toInstant(); + return ZonedDateTime.of(date, timeValue(), ZoneOffset.UTC).toInstant(); } @Override diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java 
b/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java deleted file mode 100644 index 305958043f..0000000000 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.sql.data.model; - -import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_WITH_TZ; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; - -import com.google.common.base.Objects; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.time.format.DateTimeParseException; -import java.time.temporal.ChronoUnit; -import lombok.RequiredArgsConstructor; -import org.opensearch.sql.data.type.ExprCoreType; -import org.opensearch.sql.data.type.ExprType; -import org.opensearch.sql.exception.SemanticCheckException; - -@RequiredArgsConstructor -public class ExprDatetimeValue extends AbstractExprValue { - private final LocalDateTime datetime; - - /** Constructor with datetime string as input. 
*/ - public ExprDatetimeValue(String datetime) { - try { - this.datetime = LocalDateTime.parse(datetime, DATE_TIME_FORMATTER_WITH_TZ); - } catch (DateTimeParseException e) { - throw new SemanticCheckException( - String.format( - "datetime:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", - datetime)); - } - } - - @Override - public LocalDateTime datetimeValue() { - return datetime; - } - - @Override - public LocalDate dateValue() { - return datetime.toLocalDate(); - } - - @Override - public LocalTime timeValue() { - return datetime.toLocalTime(); - } - - @Override - public Instant timestampValue() { - return ZonedDateTime.of(datetime, UTC_ZONE_ID).toInstant(); - } - - @Override - public boolean isDateTime() { - return true; - } - - @Override - public int compare(ExprValue other) { - return datetime.compareTo(other.datetimeValue()); - } - - @Override - public boolean equal(ExprValue other) { - return datetime.equals(other.datetimeValue()); - } - - @Override - public String value() { - return String.format( - "%s %s", - DateTimeFormatter.ISO_DATE.format(datetime), - DateTimeFormatter.ISO_TIME.format( - (datetime.getNano() == 0) ? 
datetime.truncatedTo(ChronoUnit.SECONDS) : datetime)); - } - - @Override - public ExprType type() { - return ExprCoreType.DATETIME; - } - - @Override - public String toString() { - return String.format("DATETIME '%s'", value()); - } - - @Override - public int hashCode() { - return Objects.hashCode(datetime); - } -} diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java index 7745af62b6..f2e63e986d 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java @@ -5,6 +5,7 @@ package org.opensearch.sql.data.model; +import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; @@ -35,27 +36,20 @@ public String stringValue() { } @Override - public LocalDateTime datetimeValue() { + public Instant timestampValue() { try { - return new ExprDatetimeValue(value).datetimeValue(); + return new ExprTimestampValue(value).timestampValue(); } catch (SemanticCheckException e) { - try { - return new ExprDatetimeValue( - LocalDateTime.of(new ExprDateValue(value).dateValue(), LocalTime.of(0, 0, 0))) - .datetimeValue(); - } catch (SemanticCheckException exception) { - throw new SemanticCheckException( - String.format( - "datetime:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", - value)); - } + return new ExprTimestampValue( + LocalDateTime.of(new ExprDateValue(value).dateValue(), LocalTime.of(0, 0, 0))) + .timestampValue(); } } @Override public LocalDate dateValue() { try { - return new ExprDatetimeValue(value).dateValue(); + return new ExprTimestampValue(value).dateValue(); } catch (SemanticCheckException e) { return new ExprDateValue(value).dateValue(); } @@ -64,7 +58,7 @@ public LocalDate dateValue() { @Override public LocalTime timeValue() { try { - return new ExprDatetimeValue(value).timeValue(); + return new 
ExprTimestampValue(value).timeValue(); } catch (SemanticCheckException e) { return new ExprTimeValue(value).timeValue(); } diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java index d808af49b1..6b5a4a7c48 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java @@ -7,12 +7,11 @@ import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.Instant; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeParseException; import java.util.Objects; @@ -57,12 +56,8 @@ public LocalDate dateValue(FunctionProperties functionProperties) { return LocalDate.now(functionProperties.getQueryStartClock()); } - public LocalDateTime datetimeValue(FunctionProperties functionProperties) { - return LocalDateTime.of(dateValue(functionProperties), timeValue()); - } - public Instant timestampValue(FunctionProperties functionProperties) { - return ZonedDateTime.of(dateValue(functionProperties), timeValue(), UTC_ZONE_ID).toInstant(); + return ZonedDateTime.of(dateValue(functionProperties), timeValue(), ZoneOffset.UTC).toInstant(); } @Override diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java index 455a379b03..e103dc7253 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java @@ -7,12 +7,12 @@ import static 
org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_WITHOUT_NANO; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.format.DateTimeParseException; import java.time.temporal.ChronoUnit; import java.util.Objects; @@ -32,7 +32,7 @@ public ExprTimestampValue(String timestamp) { try { this.timestamp = LocalDateTime.parse(timestamp, DATE_TIME_FORMATTER_VARIABLE_NANOS) - .atZone(UTC_ZONE_ID) + .atZone(ZoneOffset.UTC) .toInstant(); } catch (DateTimeParseException e) { throw new SemanticCheckException( @@ -42,13 +42,18 @@ public ExprTimestampValue(String timestamp) { } } + /** localDateTime Constructor. */ + public ExprTimestampValue(LocalDateTime localDateTime) { + this.timestamp = localDateTime.atZone(ZoneOffset.UTC).toInstant(); + } + @Override public String value() { return timestamp.getNano() == 0 ? 
DATE_TIME_FORMATTER_WITHOUT_NANO - .withZone(UTC_ZONE_ID) + .withZone(ZoneOffset.UTC) .format(timestamp.truncatedTo(ChronoUnit.SECONDS)) - : DATE_TIME_FORMATTER_VARIABLE_NANOS.withZone(UTC_ZONE_ID).format(timestamp); + : DATE_TIME_FORMATTER_VARIABLE_NANOS.withZone(ZoneOffset.UTC).format(timestamp); } @Override @@ -63,17 +68,12 @@ public Instant timestampValue() { @Override public LocalDate dateValue() { - return timestamp.atZone(UTC_ZONE_ID).toLocalDate(); + return timestamp.atZone(ZoneOffset.UTC).toLocalDate(); } @Override public LocalTime timeValue() { - return timestamp.atZone(UTC_ZONE_ID).toLocalTime(); - } - - @Override - public LocalDateTime datetimeValue() { - return timestamp.atZone(UTC_ZONE_ID).toLocalDateTime(); + return timestamp.atZone(ZoneOffset.UTC).toLocalTime(); } @Override @@ -88,12 +88,12 @@ public String toString() { @Override public int compare(ExprValue other) { - return timestamp.compareTo(other.timestampValue().atZone(UTC_ZONE_ID).toInstant()); + return timestamp.compareTo(other.timestampValue().atZone(ZoneOffset.UTC).toInstant()); } @Override public boolean equal(ExprValue other) { - return timestamp.equals(other.timestampValue().atZone(UTC_ZONE_ID).toInstant()); + return timestamp.equals(other.timestampValue().atZone(ZoneOffset.UTC).toInstant()); } @Override diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java index 86bead77b7..034ed22a75 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java @@ -8,7 +8,6 @@ import java.io.Serializable; import java.time.Instant; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; import java.time.temporal.TemporalAmount; import java.util.List; @@ -133,12 +132,6 @@ default LocalDate dateValue() { "invalid to get dateValue from value of type " + type()); } - /** Get datetime value. 
*/ - default LocalDateTime datetimeValue() { - throw new ExpressionEvaluationException( - "invalid to get datetimeValue from value of type " + type()); - } - /** Get interval value. */ default TemporalAmount intervalValue() { throw new ExpressionEvaluationException( diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java index a259eb9fba..20813045f2 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java @@ -9,6 +9,7 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.temporal.TemporalAmount; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -66,10 +67,6 @@ public static ExprValue dateValue(LocalDate value) { return new ExprDateValue(value); } - public static ExprValue datetimeValue(LocalDateTime value) { - return new ExprDatetimeValue(value); - } - public static ExprValue timeValue(LocalTime value) { return new ExprTimeValue(value); } @@ -128,14 +125,14 @@ public static ExprValue fromObjectValue(Object o) { return floatValue((Float) o); } else if (o instanceof LocalDate) { return dateValue((LocalDate) o); - } else if (o instanceof LocalDateTime) { - return datetimeValue((LocalDateTime) o); } else if (o instanceof LocalTime) { return timeValue((LocalTime) o); } else if (o instanceof Instant) { return timestampValue((Instant) o); } else if (o instanceof TemporalAmount) { return intervalValue((TemporalAmount) o); + } else if (o instanceof LocalDateTime) { + return timestampValue(((LocalDateTime) o).toInstant(ZoneOffset.UTC)); } else { throw new ExpressionEvaluationException("unsupported object " + o.getClass()); } @@ -150,8 +147,6 @@ public static ExprValue fromObjectValue(Object o, ExprCoreType type) { return new ExprDateValue((String) o); case TIME: return new 
ExprTimeValue((String) o); - case DATETIME: - return new ExprDatetimeValue((String) o); default: return fromObjectValue(o); } diff --git a/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java b/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java index f1979d8666..cbc0c98255 100644 --- a/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java +++ b/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java @@ -42,8 +42,7 @@ public enum ExprCoreType implements ExprType { /** Date. */ DATE(STRING), TIME(STRING), - DATETIME(STRING, DATE, TIME), - TIMESTAMP(STRING, DATETIME), + TIMESTAMP(STRING, DATE, TIME), INTERVAL(UNDEFINED), /** Struct. */ diff --git a/core/src/main/java/org/opensearch/sql/expression/DSL.java b/core/src/main/java/org/opensearch/sql/expression/DSL.java index 4341668b69..12a7faafb2 100644 --- a/core/src/main/java/org/opensearch/sql/expression/DSL.java +++ b/core/src/main/java/org/opensearch/sql/expression/DSL.java @@ -819,10 +819,6 @@ public static FunctionExpression castTimestamp(Expression value) { return compile(FunctionProperties.None, BuiltinFunctionName.CAST_TO_TIMESTAMP, value); } - public static FunctionExpression castDatetime(Expression value) { - return compile(FunctionProperties.None, BuiltinFunctionName.CAST_TO_DATETIME, value); - } - public static FunctionExpression typeof(Expression value) { return compile(FunctionProperties.None, BuiltinFunctionName.TYPEOF, value); } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java index 4a1d4d309b..bfc92d73c6 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java @@ -7,7 +7,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static 
org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -71,9 +70,6 @@ private static DefaultFunctionResolver avg() { .put( new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new AvgAggregator(arguments, DATE)) - .put( - new FunctionSignature(functionName, Collections.singletonList(DATETIME)), - (functionProperties, arguments) -> new AvgAggregator(arguments, DATETIME)) .put( new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new AvgAggregator(arguments, TIME)) @@ -142,9 +138,6 @@ private static DefaultFunctionResolver min() { .put( new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new MinAggregator(arguments, DATE)) - .put( - new FunctionSignature(functionName, Collections.singletonList(DATETIME)), - (functionProperties, arguments) -> new MinAggregator(arguments, DATETIME)) .put( new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new MinAggregator(arguments, TIME)) @@ -177,9 +170,6 @@ private static DefaultFunctionResolver max() { .put( new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new MaxAggregator(arguments, DATE)) - .put( - new FunctionSignature(functionName, Collections.singletonList(DATETIME)), - (functionProperties, arguments) -> new MaxAggregator(arguments, DATETIME)) .put( new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new MaxAggregator(arguments, TIME)) diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java 
b/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java index c528968018..c32ebb6071 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java @@ -13,7 +13,6 @@ import java.util.List; import java.util.Locale; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprNullValue; @@ -47,8 +46,6 @@ public AvgState create() { switch (dataType) { case DATE: return new DateAvgState(); - case DATETIME: - return new DateTimeAvgState(); case TIMESTAMP: return new TimestampAvgState(); case TIME: @@ -128,28 +125,6 @@ protected AvgState iterate(ExprValue value) { } } - protected static class DateTimeAvgState extends AvgState { - @Override - public ExprValue result() { - if (0 == count.integerValue()) { - return ExprNullValue.of(); - } - - return new ExprDatetimeValue( - new ExprTimestampValue( - Instant.ofEpochMilli( - DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())) - .datetimeValue()); - } - - @Override - protected AvgState iterate(ExprValue value) { - total = - DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())).valueOf(); - return super.iterate(value); - } - } - protected static class TimestampAvgState extends AvgState { @Override public ExprValue result() { diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java index 13f9a077e4..d23cbc2df3 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java @@ -12,6 +12,7 @@ import 
java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; import java.time.format.ResolverStyle; @@ -21,9 +22,9 @@ import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.expression.function.FunctionProperties; @@ -245,12 +246,12 @@ static ExprValue getFormattedString( /** * Format the date using the date format String. * - * @param dateExpr the date ExprValue of Date/Datetime/Timestamp/String type. + * @param dateExpr the date ExprValue of Date/Timestamp/String type. * @param formatExpr the format ExprValue of String type. * @return Date formatted using format and returned as a String. 
*/ static ExprValue getFormattedDate(ExprValue dateExpr, ExprValue formatExpr) { - final LocalDateTime date = dateExpr.datetimeValue(); + final LocalDateTime date = dateExpr.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(); return getFormattedString(formatExpr, DATE_HANDLERS, date); } @@ -364,7 +365,7 @@ static ExprValue parseStringWithDateOrTime( output = LocalDateTime.of(year, month, day, hour, minute, second); } - return new ExprDatetimeValue(output); + return new ExprTimestampValue(output); } /** diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java index d17d59d358..a42a599ad8 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java @@ -14,7 +14,6 @@ import static java.time.temporal.ChronoUnit.WEEKS; import static java.time.temporal.ChronoUnit.YEARS; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -41,9 +40,8 @@ import static org.opensearch.sql.utils.DateTimeFormatters.SHORT_DATE_LENGTH; import static org.opensearch.sql.utils.DateTimeFormatters.SINGLE_DIGIT_MONTH_DATE_LENGTH; import static org.opensearch.sql.utils.DateTimeFormatters.SINGLE_DIGIT_YEAR_DATE_LENGTH; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import static org.opensearch.sql.utils.DateTimeUtils.extractDate; -import static org.opensearch.sql.utils.DateTimeUtils.extractDateTime; +import static org.opensearch.sql.utils.DateTimeUtils.extractTimestamp; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableTable; @@ -74,13 
+72,13 @@ import lombok.experimental.UtilityClass; import org.apache.commons.lang3.tuple.Pair; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprLongValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.exception.ExpressionEvaluationException; @@ -110,7 +108,7 @@ public class DateTimeFunction { // The number of days from year zero to year 1970. private static final Long DAYS_0000_TO_1970 = (146097 * 5L) - (30L * 365L + 7L); - // MySQL doesn't process any datetime/timestamp values which are greater than + // MySQL doesn't process any timestamp values which are greater than // 32536771199.999999, or equivalent '3001-01-18 23:59:59.999999' UTC private static final Double MYSQL_MAX_TIMESTAMP = 32536771200d; @@ -150,11 +148,6 @@ public class DateTimeFunction { .put("date", "iso", "%Y-%m-%d") .put("date", "eur", "%d.%m.%Y") .put("date", "internal", "%Y%m%d") - .put("datetime", "usa", "%Y-%m-%d %H.%i.%s") - .put("datetime", "jis", "%Y-%m-%d %H:%i:%s") - .put("datetime", "iso", "%Y-%m-%d %H:%i:%s") - .put("datetime", "eur", "%Y-%m-%d %H.%i.%s") - .put("datetime", "internal", "%Y%m%d%H%i%s") .put("time", "usa", "%h:%i:%s %p") .put("time", "jis", "%H:%i:%s") .put("time", "iso", "%H:%i:%s") @@ -255,8 +248,8 @@ private FunctionResolver now(FunctionName functionName) { functionName, implWithProperties( functionProperties -> - new ExprDatetimeValue(formatNow(functionProperties.getQueryStartClock())), - DATETIME)); + new 
ExprTimestampValue(formatNow(functionProperties.getQueryStartClock())), + TIMESTAMP)); } private FunctionResolver now() { @@ -280,12 +273,12 @@ private FunctionResolver sysdate() { return define( BuiltinFunctionName.SYSDATE.getName(), implWithProperties( - functionProperties -> new ExprDatetimeValue(formatNow(Clock.systemDefaultZone())), - DATETIME), + functionProperties -> new ExprTimestampValue(formatNow(Clock.systemDefaultZone())), + TIMESTAMP), FunctionDSL.implWithProperties( (functionProperties, v) -> - new ExprDatetimeValue(formatNow(Clock.systemDefaultZone(), v.integerValue())), - DATETIME, + new ExprTimestampValue(formatNow(Clock.systemDefaultZone(), v.integerValue())), + TIMESTAMP, INTEGER)); } @@ -329,37 +322,34 @@ private FunctionResolver current_date() { * Specify a start date and add/subtract a temporal amount to/from the date.
* The return type depends on the date type and the interval unit. Detailed supported signatures: *
- * (DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME
+ * (DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP
* MySQL has these signatures too
* (DATE, INTERVAL) -> DATE // when interval has no time part
* (TIME, INTERVAL) -> TIME // when interval has no date part
- * (STRING, INTERVAL) -> STRING // when argument has date or datetime string,
- * // result has date or datetime depending on interval type
+ * (STRING, INTERVAL) -> STRING // when argument has date or timestamp string,
+ * // result has date or timestamp depending on interval type
*/ private Stream> get_date_add_date_sub_signatures( SerializableTriFunction function) { return Stream.of( - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, DATE, INTERVAL), - implWithProperties( - nullMissingHandlingWithProperties(function), DATETIME, DATETIME, INTERVAL), + implWithProperties(nullMissingHandlingWithProperties(function), TIMESTAMP, DATE, INTERVAL), implWithProperties( - nullMissingHandlingWithProperties(function), DATETIME, TIMESTAMP, INTERVAL), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, INTERVAL)); + nullMissingHandlingWithProperties(function), TIMESTAMP, TIMESTAMP, INTERVAL), + implWithProperties(nullMissingHandlingWithProperties(function), TIMESTAMP, TIME, INTERVAL)); } /** * A common signature for `adddate` and `subdate`.
* Adds/subtracts an integer number of days to/from the first argument.
* (DATE, LONG) -> DATE
- * (TIME/DATETIME/TIMESTAMP, LONG) -> DATETIME + * (TIME/TIMESTAMP, LONG) -> TIMESTAMP */ private Stream> get_adddate_subdate_signatures( SerializableTriFunction function) { return Stream.of( implWithProperties(nullMissingHandlingWithProperties(function), DATE, DATE, LONG), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, DATETIME, LONG), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIMESTAMP, LONG), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, LONG)); + implWithProperties(nullMissingHandlingWithProperties(function), TIMESTAMP, TIMESTAMP, LONG), + implWithProperties(nullMissingHandlingWithProperties(function), TIMESTAMP, TIME, LONG)); } private DefaultFunctionResolver adddate() { @@ -374,8 +364,8 @@ private DefaultFunctionResolver adddate() { /** * Adds expr2 to expr1 and returns the result.
- * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME
- * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME
+ * (TIME, TIME/DATE/TIMESTAMP) -> TIME
+ * (DATE/TIMESTAMP, TIME/DATE/TIMESTAMP) -> TIMESTAMP
* TODO: MySQL has these signatures too
* (STRING, STRING/TIME) -> STRING // second arg - string with time only
* (x, STRING) -> NULL // second arg - string with timestamp
@@ -388,8 +378,6 @@ private DefaultFunctionResolver addtime() { nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, DATETIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, @@ -397,56 +385,32 @@ private DefaultFunctionResolver addtime() { TIMESTAMP), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, - DATETIME, + TIMESTAMP, + DATE, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, - DATETIME, - DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, - DATETIME, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, - DATETIME, - TIMESTAMP), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), DATETIME, DATE, TIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), DATETIME, DATE, DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, + TIMESTAMP, DATE, - DATETIME), + DATE), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, + TIMESTAMP, DATE, TIMESTAMP), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, + TIMESTAMP, TIMESTAMP, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, + TIMESTAMP, TIMESTAMP, DATE), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, - DATETIME), - implWithProperties( - 
nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, TIMESTAMP)); } @@ -454,21 +418,21 @@ private DefaultFunctionResolver addtime() { /** * Converts date/time from a specified timezone to another specified timezone.
* The supported signatures:
- * (DATETIME, STRING, STRING) -> DATETIME
- * (STRING, STRING, STRING) -> DATETIME + * (TIMESTAMP, STRING, STRING) -> TIMESTAMP
+ * (STRING, STRING, STRING) -> TIMESTAMP */ private DefaultFunctionResolver convert_tz() { return define( BuiltinFunctionName.CONVERT_TZ.getName(), impl( nullMissingHandling(DateTimeFunction::exprConvertTZ), - DATETIME, - DATETIME, + TIMESTAMP, + TIMESTAMP, STRING, STRING), impl( nullMissingHandling(DateTimeFunction::exprConvertTZ), - DATETIME, + TIMESTAMP, STRING, STRING, STRING)); @@ -476,41 +440,25 @@ private DefaultFunctionResolver convert_tz() { /** * Extracts the date part of a date and time value. Also to construct a date type. The supported - * signatures: STRING/DATE/DATETIME/TIMESTAMP -> DATE + * signatures: STRING/DATE/TIMESTAMP -> DATE */ private DefaultFunctionResolver date() { return define( BuiltinFunctionName.DATE.getName(), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, STRING), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, TIMESTAMP)); } - /* - * Calculates the difference of date part of given values. - * (DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME) -> LONG + /** + * Calculates the difference of date part of given values.
+ * (DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME) -> LONG */ private DefaultFunctionResolver datediff() { return define( BuiltinFunctionName.DATEDIFF.getName(), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, DATE, DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - DATETIME, - DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - DATE, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - DATETIME, - DATETIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, DATE, TIME), implWithProperties( @@ -541,40 +489,20 @@ private DefaultFunctionResolver datediff() { nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, TIME, - TIMESTAMP), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - TIMESTAMP, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - DATETIME, - TIMESTAMP), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - TIME, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - DATETIME, - TIME)); + TIMESTAMP)); } /** * Specify a datetime with time zone field and a time zone to convert to.
- * Returns a local date time.
- * (STRING, STRING) -> DATETIME
- * (STRING) -> DATETIME + * Returns a local datetime.
+ * (STRING, STRING) -> TIMESTAMP
+ * (STRING) -> TIMESTAMP */ private FunctionResolver datetime() { return define( BuiltinFunctionName.DATETIME.getName(), - impl(nullMissingHandling(DateTimeFunction::exprDateTime), DATETIME, STRING, STRING), - impl(nullMissingHandling(DateTimeFunction::exprDateTimeNoTimezone), DATETIME, STRING)); + impl(nullMissingHandling(DateTimeFunction::exprDateTime), TIMESTAMP, STRING, STRING), + impl(nullMissingHandling(DateTimeFunction::exprDateTimeNoTimezone), TIMESTAMP, STRING)); } private DefaultFunctionResolver date_add() { @@ -593,30 +521,28 @@ private DefaultFunctionResolver date_sub() { .toArray(SerializableFunction[]::new)); } - /** DAY(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). */ + /** DAY(STRING/DATE/TIMESTAMP). return the day of the month (1-31). */ private DefaultFunctionResolver day() { return define( BuiltinFunctionName.DAY.getName(), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, STRING)); } /** - * DAYNAME(STRING/DATE/DATETIME/TIMESTAMP). return the name of the weekday for date, including + * DAYNAME(STRING/DATE/TIMESTAMP). return the name of the weekday for date, including
* Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday. */ private DefaultFunctionResolver dayName() { return define( BuiltinFunctionName.DAYNAME.getName(), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, STRING)); } - /** DAYOFMONTH(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). */ + /** DAYOFMONTH(STRING/DATE/TIMESTAMP). return the day of the month (1-31). */ private DefaultFunctionResolver dayOfMonth(BuiltinFunctionName name) { return define( name.getName(), @@ -627,14 +553,13 @@ private DefaultFunctionResolver dayOfMonth(BuiltinFunctionName name) { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, TIMESTAMP)); } /** - * DAYOFWEEK(STRING/DATE/DATETIME/TIME/TIMESTAMP). return the weekday index for date (1 = Sunday, - * 2 = Monday, ..., 7 = Saturday). + * DAYOFWEEK(STRING/DATE/TIME/TIMESTAMP). return the weekday index for date (1 = Sunday, 2 = + * Monday, ..., 7 = Saturday). */ private DefaultFunctionResolver dayOfWeek(FunctionName name) { return define( @@ -646,12 +571,11 @@ private DefaultFunctionResolver dayOfWeek(FunctionName name) { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, STRING)); } - /** DAYOFYEAR(STRING/DATE/DATETIME/TIMESTAMP). 
return the day of the year for date (1-366). */ + /** DAYOFYEAR(STRING/DATE/TIMESTAMP). return the day of the year for date (1-366). */ private DefaultFunctionResolver dayOfYear(BuiltinFunctionName dayOfYear) { return define( dayOfYear.getName(), @@ -662,7 +586,6 @@ private DefaultFunctionResolver dayOfYear(BuiltinFunctionName dayOfYear) { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, STRING)); } @@ -676,7 +599,6 @@ private DefaultFunctionResolver extract() { STRING, TIME), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, DATE), - impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, STRING)); } @@ -691,7 +613,7 @@ private DefaultFunctionResolver from_days() { private FunctionResolver from_unixtime() { return define( BuiltinFunctionName.FROM_UNIXTIME.getName(), - impl(nullMissingHandling(DateTimeFunction::exprFromUnixTime), DATETIME, DOUBLE), + impl(nullMissingHandling(DateTimeFunction::exprFromUnixTime), TIMESTAMP, DOUBLE), impl( nullMissingHandling(DateTimeFunction::exprFromUnixTimeFormat), STRING, DOUBLE, STRING)); } @@ -702,14 +624,13 @@ private DefaultFunctionResolver get_format() { impl(nullMissingHandling(DateTimeFunction::exprGetFormat), STRING, STRING, STRING)); } - /** HOUR(STRING/TIME/DATETIME/DATE/TIMESTAMP). return the hour value for time. */ + /** HOUR(STRING/TIME/DATE/TIMESTAMP). return the hour value for time. 
*/ private DefaultFunctionResolver hour(BuiltinFunctionName name) { return define( name.getName(), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, TIMESTAMP)); } @@ -724,7 +645,6 @@ private DefaultFunctionResolver last_day() { DATE, TIME), impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, DATE), - impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, TIMESTAMP)); } @@ -740,39 +660,36 @@ private FunctionResolver maketime() { impl(nullMissingHandling(DateTimeFunction::exprMakeTime), TIME, DOUBLE, DOUBLE, DOUBLE)); } - /** MICROSECOND(STRING/TIME/DATETIME/TIMESTAMP). return the microsecond value for time. */ + /** MICROSECOND(STRING/TIME/TIMESTAMP). return the microsecond value for time. */ private DefaultFunctionResolver microsecond() { return define( BuiltinFunctionName.MICROSECOND.getName(), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, TIME), - impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, TIMESTAMP)); } - /** MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. */ + /** MINUTE(STRING/TIME/TIMESTAMP). return the minute value for time. 
*/ private DefaultFunctionResolver minute(BuiltinFunctionName name) { return define( name.getName(), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, TIME), - impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, TIMESTAMP)); } - /** MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. */ + /** MINUTE(STRING/TIME/TIMESTAMP). return the minute value for time. */ private DefaultFunctionResolver minute_of_day() { return define( BuiltinFunctionName.MINUTE_OF_DAY.getName(), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, TIMESTAMP)); } - /** MONTH(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-12). */ + /** MONTH(STRING/DATE/TIMESTAMP). return the month for date (1-12). */ private DefaultFunctionResolver month(BuiltinFunctionName month) { return define( month.getName(), @@ -783,17 +700,15 @@ private DefaultFunctionResolver month(BuiltinFunctionName month) { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, STRING)); } - /** MONTHNAME(STRING/DATE/DATETIME/TIMESTAMP). return the full name of the month for date. */ + /** MONTHNAME(STRING/DATE/TIMESTAMP). return the full name of the month for date. 
*/ private DefaultFunctionResolver monthName() { return define( BuiltinFunctionName.MONTHNAME.getName(), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, DATE), - impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, STRING)); } @@ -819,12 +734,11 @@ private DefaultFunctionResolver period_diff() { impl(nullMissingHandling(DateTimeFunction::exprPeriodDiff), INTEGER, INTEGER, INTEGER)); } - /** QUARTER(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-4). */ + /** QUARTER(STRING/DATE/TIMESTAMP). return the month for date (1-4). */ private DefaultFunctionResolver quarter() { return define( BuiltinFunctionName.QUARTER.getName(), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, STRING)); } @@ -838,14 +752,13 @@ private DefaultFunctionResolver sec_to_time() { impl((nullMissingHandling(DateTimeFunction::exprSecToTimeWithNanos)), TIME, FLOAT)); } - /** SECOND(STRING/TIME/DATETIME/TIMESTAMP). return the second value for time. */ + /** SECOND(STRING/TIME/TIMESTAMP). return the second value for time. 
*/ private DefaultFunctionResolver second(BuiltinFunctionName name) { return define( name.getName(), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, TIMESTAMP)); } @@ -861,8 +774,8 @@ private DefaultFunctionResolver subdate() { /** * Subtracts expr2 from expr1 and returns the result.
- * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME
- * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME
+ * (TIME, TIME/DATE/TIMESTAMP) -> TIME
+ * (DATE/TIMESTAMP, TIME/DATE/TIMESTAMP) -> TIMESTAMP
* TODO: MySQL has these signatures too
* (STRING, STRING/TIME) -> STRING // second arg - string with time only
* (x, STRING) -> NULL // second arg - string with timestamp
@@ -875,8 +788,6 @@ private DefaultFunctionResolver subtime() { nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, DATETIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, @@ -884,62 +795,38 @@ private DefaultFunctionResolver subtime() { TIMESTAMP), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATETIME, + TIMESTAMP, + TIMESTAMP, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATETIME, + TIMESTAMP, + TIMESTAMP, DATE), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATETIME, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATETIME, - TIMESTAMP), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), DATETIME, DATE, TIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), DATETIME, DATE, DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATE, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATE, - TIMESTAMP), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, + DATE, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, + DATE, DATE), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, - DATETIME), + DATE, + TIMESTAMP), implWithProperties( 
nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, + TIMESTAMP, TIMESTAMP, TIMESTAMP)); } /** - * Extracts a date, time, or datetime from the given string. It accomplishes this using another + * Extracts a date, time, or timestamp from the given string. It accomplishes this using another * string which specifies the input format. */ private DefaultFunctionResolver str_to_date() { @@ -949,21 +836,20 @@ private DefaultFunctionResolver str_to_date() { nullMissingHandlingWithProperties( (functionProperties, arg, format) -> DateTimeFunction.exprStrToDate(functionProperties, arg, format)), - DATETIME, + TIMESTAMP, STRING, STRING)); } /** * Extracts the time part of a date and time value. Also to construct a time type. The supported - * signatures: STRING/DATE/DATETIME/TIME/TIMESTAMP -> TIME + * signatures: STRING/DATE/TIME/TIMESTAMP -> TIME */ private DefaultFunctionResolver time() { return define( BuiltinFunctionName.TIME.getName(), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, STRING), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, DATE), - impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, TIME), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, TIMESTAMP)); } @@ -973,7 +859,6 @@ private DefaultFunctionResolver time() { * (TIME, TIME) -> TIME
* MySQL has these signatures too
* (DATE, DATE) -> TIME // result is > 24 hours
- * (DATETIME, DATETIME) -> TIME // result is > 24 hours
* (TIMESTAMP, TIMESTAMP) -> TIME // result is > 24 hours
* (x, x) -> NULL // when args have different types
* (STRING, STRING) -> TIME // argument strings contain same types only
@@ -985,23 +870,20 @@ private DefaultFunctionResolver timediff() { impl(nullMissingHandling(DateTimeFunction::exprTimeDiff), TIME, TIME, TIME)); } - /** - * TIME_TO_SEC(STRING/TIME/DATETIME/TIMESTAMP). return the time argument, converted to seconds. - */ + /** TIME_TO_SEC(STRING/TIME/TIMESTAMP). return the time argument, converted to seconds. */ private DefaultFunctionResolver time_to_sec() { return define( BuiltinFunctionName.TIME_TO_SEC.getName(), impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, STRING), impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, TIME), - impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, DATETIME)); + impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, TIMESTAMP)); } /** * Extracts the timestamp of a date and time value.
* Input strings may contain a timestamp only in format 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'
- * STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP
- * STRING/DATE/TIME/DATETIME/TIMESTAMP, STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP
+ * STRING/DATE/TIME/TIMESTAMP -> TIMESTAMP
+ * STRING/DATE/TIME/TIMESTAMP, STRING/DATE/TIME/TIMESTAMP -> TIMESTAMP
* All types are converted to TIMESTAMP actually before the function call - it is responsibility *
* of the automatic cast mechanism defined in `ExprCoreType` and performed by `TypeCastOperator`. @@ -1020,27 +902,20 @@ private DefaultFunctionResolver timestamp() { } /** - * Adds an interval of time to the provided DATE/DATETIME/TIME/TIMESTAMP/STRING argument. The - * interval of time added is determined by the given first and second arguments. The first - * argument is an interval type, and must be one of the tokens below... [MICROSECOND, SECOND, - * MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] The second argument is the amount of the - * interval type to be added. The third argument is the DATE/DATETIME/TIME/TIMESTAMP/STRING to add - * to. + * Adds an interval of time to the provided DATE/TIME/TIMESTAMP/STRING argument. The interval of + * time added is determined by the given first and second arguments. The first argument is an + * interval type, and must be one of the tokens below... [MICROSECOND, SECOND, MINUTE, HOUR, DAY, + * WEEK, MONTH, QUARTER, YEAR] The second argument is the amount of the interval type to be added. + * The third argument is the DATE/TIME/TIMESTAMP/STRING to add to. * - * @return The DATETIME representing the summed DATE/DATETIME/TIME/TIMESTAMP and interval. + * @return The TIMESTAMP representing the summed DATE/TIME/TIMESTAMP and interval. */ private DefaultFunctionResolver timestampadd() { return define( BuiltinFunctionName.TIMESTAMPADD.getName(), impl( nullMissingHandling(DateTimeFunction::exprTimestampAdd), - DATETIME, - STRING, - INTEGER, - DATETIME), - impl( - nullMissingHandling(DateTimeFunction::exprTimestampAdd), - DATETIME, + TIMESTAMP, STRING, INTEGER, TIMESTAMP), @@ -1049,18 +924,18 @@ private DefaultFunctionResolver timestampadd() { (functionProperties, part, amount, time) -> exprTimestampAddForTimeType( functionProperties.getQueryStartClock(), part, amount, time)), - DATETIME, + TIMESTAMP, STRING, INTEGER, TIME)); } /** - * Finds the difference between provided DATE/DATETIME/TIME/TIMESTAMP/STRING arguments. 
The first - * argument is an interval type, and must be one of the tokens below... [MICROSECOND, SECOND, - * MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] The second argument the - * DATE/DATETIME/TIME/TIMESTAMP/STRING representing the start time. The third argument is the - * DATE/DATETIME/TIME/TIMESTAMP/STRING representing the end time. + * Finds the difference between provided DATE/TIME/TIMESTAMP/STRING arguments. The first argument + * is an interval type, and must be one of the tokens below... [MICROSECOND, SECOND, MINUTE, HOUR, + * DAY, WEEK, MONTH, QUARTER, YEAR] The second argument the DATE/TIME/TIMESTAMP/STRING + * representing the start time. The third argument is the DATE/TIME/TIMESTAMP/STRING representing + * the end time. * * @return A LONG representing the difference between arguments, using the given interval type. */ @@ -1069,25 +944,7 @@ private DefaultFunctionResolver timestampdiff() { BuiltinFunctionName.TIMESTAMPDIFF.getName(), impl( nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, - STRING, - DATETIME, - DATETIME), - impl( - nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, - STRING, - DATETIME, - TIMESTAMP), - impl( - nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, - STRING, TIMESTAMP, - DATETIME), - impl( - nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, STRING, TIMESTAMP, TIMESTAMP), @@ -1095,20 +952,19 @@ private DefaultFunctionResolver timestampdiff() { nullMissingHandlingWithProperties( (functionProperties, part, startTime, endTime) -> exprTimestampDiffForTimeType(functionProperties, part, startTime, endTime)), - DATETIME, + TIMESTAMP, STRING, TIME, TIME)); } - /** TO_DAYS(STRING/DATE/DATETIME/TIMESTAMP). return the day number of the given date. */ + /** TO_DAYS(STRING/DATE/TIMESTAMP). return the day number of the given date. 
*/ private DefaultFunctionResolver to_days() { return define( BuiltinFunctionName.TO_DAYS.getName(), impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, STRING), impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, DATE), - impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, DATETIME)); + impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, DATE)); } /** @@ -1131,7 +987,6 @@ private FunctionResolver unix_timestamp() { DateTimeFunction.unixTimeStamp(functionProperties.getQueryStartClock()), LONG), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DATE), - impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DATETIME), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DOUBLE)); } @@ -1154,7 +1009,7 @@ private DefaultFunctionResolver utc_time() { private DefaultFunctionResolver utc_timestamp() { return define( BuiltinFunctionName.UTC_TIMESTAMP.getName(), - implWithProperties(functionProperties -> exprUtcTimeStamp(functionProperties), DATETIME)); + implWithProperties(functionProperties -> exprUtcTimeStamp(functionProperties), TIMESTAMP)); } /** WEEK(DATE[,mode]). return the week number for date. 
*/ @@ -1169,7 +1024,6 @@ private DefaultFunctionResolver week(BuiltinFunctionName week) { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, STRING), implWithProperties( @@ -1181,7 +1035,6 @@ private DefaultFunctionResolver week(BuiltinFunctionName week) { TIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, DATE, INTEGER), - impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, DATETIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, TIMESTAMP, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, STRING, INTEGER)); } @@ -1198,17 +1051,15 @@ private DefaultFunctionResolver weekday() { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, STRING)); } - /** YEAR(STRING/DATE/DATETIME/TIMESTAMP). return the year for date (1000-9999). */ + /** YEAR(STRING/DATE/TIMESTAMP). return the year for date (1000-9999). 
*/ private DefaultFunctionResolver year() { return define( BuiltinFunctionName.YEAR.getName(), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, STRING)); } @@ -1225,7 +1076,6 @@ private DefaultFunctionResolver yearweek() { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, STRING), implWithProperties( @@ -1236,7 +1086,6 @@ private DefaultFunctionResolver yearweek() { TIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, DATE, INTEGER), - impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, DATETIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, TIMESTAMP, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, STRING, INTEGER)); } @@ -1246,7 +1095,6 @@ private DefaultFunctionResolver yearweek() { * Detailed supported signatures:
* (STRING, STRING) -> STRING
* (DATE, STRING) -> STRING
- * (DATETIME, STRING) -> STRING
* (TIME, STRING) -> STRING
* (TIMESTAMP, STRING) -> STRING */ @@ -1255,8 +1103,6 @@ private DefaultFunctionResolver date_format() { BuiltinFunctionName.DATE_FORMAT.getName(), impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, STRING, STRING), impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, DATE, STRING), - impl( - nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, DATETIME, STRING), implWithProperties( nullMissingHandlingWithProperties( (functionProperties, time, formatString) -> @@ -1299,9 +1145,9 @@ private ExprValue dayOfWeekToday(Clock clock) { * DATE_ADD function implementation for ExprValue. * * @param functionProperties An FunctionProperties object. - * @param datetime ExprValue of Date/Time/Datetime/Timestamp type. + * @param datetime ExprValue of Date/Time/Timestamp type. * @param interval ExprValue of Interval type, the temporal amount to add. - * @return Datetime resulted from `interval` added to `datetime`. + * @return Timestamp resulted from `interval` added to `timestamp`. */ private ExprValue exprAddDateInterval( FunctionProperties functionProperties, ExprValue datetime, ExprValue interval) { @@ -1309,21 +1155,22 @@ private ExprValue exprAddDateInterval( } /** - * Adds or subtracts `interval` to/from `datetime`. + * Adds or subtracts `interval` to/from `timestamp`. * * @param functionProperties An FunctionProperties object. - * @param datetime A Date/Time/Datetime/Timestamp value to change. + * @param datetime A Date/Time/Timestamp value to change. * @param interval An Interval to isAdd or subtract. * @param isAdd A flag: true to isAdd, false to subtract. - * @return Datetime calculated. + * @return Timestamp calculated. */ private ExprValue exprDateApplyInterval( FunctionProperties functionProperties, ExprValue datetime, TemporalAmount interval, Boolean isAdd) { - var dt = extractDateTime(datetime, functionProperties); - return new ExprDatetimeValue(isAdd ? 
dt.plus(interval) : dt.minus(interval)); + var dt = + extractTimestamp(datetime, functionProperties).atZone(ZoneOffset.UTC).toLocalDateTime(); + return new ExprTimestampValue(isAdd ? dt.plus(interval) : dt.minus(interval)); } /** @@ -1331,7 +1178,6 @@ private ExprValue exprDateApplyInterval( * Detailed supported signatures:
* (STRING, STRING) -> STRING
* (DATE, STRING) -> STRING
- * (DATETIME, STRING) -> STRING
* (TIME, STRING) -> STRING
* (TIMESTAMP, STRING) -> STRING */ @@ -1340,8 +1186,6 @@ private DefaultFunctionResolver time_format() { BuiltinFunctionName.TIME_FORMAT.getName(), impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, STRING, STRING), impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, DATE, STRING), - impl( - nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, DATETIME, STRING), impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, TIME, STRING), impl( nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), @@ -1354,9 +1198,9 @@ private DefaultFunctionResolver time_format() { * ADDDATE function implementation for ExprValue. * * @param functionProperties An FunctionProperties object. - * @param datetime ExprValue of Time/Date/Datetime/Timestamp type. + * @param datetime ExprValue of Time/Date/Timestamp type. * @param days ExprValue of Long type, representing the number of days to add. - * @return Date/Datetime resulted from days added to `datetime`. + * @return Date/Timestamp resulted from days added to `timestamp`. */ private ExprValue exprAddDateDays( FunctionProperties functionProperties, ExprValue datetime, ExprValue days) { @@ -1364,13 +1208,13 @@ private ExprValue exprAddDateDays( } /** - * Adds or subtracts `days` to/from `datetime`. + * Adds or subtracts `days` to/from `timestamp`. * * @param functionProperties An FunctionProperties object. - * @param datetime A Date/Time/Datetime/Timestamp value to change. + * @param datetime A Date/Time/Timestamp value to change. * @param days A days amount to add or subtract. * @param isAdd A flag: true to add, false to subtract. - * @return Datetime calculated. + * @return Timestamp calculated. */ private ExprValue exprDateApplyDays( FunctionProperties functionProperties, ExprValue datetime, Long days, Boolean isAdd) { @@ -1378,16 +1222,17 @@ private ExprValue exprDateApplyDays( return new ExprDateValue( isAdd ? 
datetime.dateValue().plusDays(days) : datetime.dateValue().minusDays(days)); } - var dt = extractDateTime(datetime, functionProperties); - return new ExprDatetimeValue(isAdd ? dt.plusDays(days) : dt.minusDays(days)); + var dt = + extractTimestamp(datetime, functionProperties).atZone(ZoneOffset.UTC).toLocalDateTime(); + return new ExprTimestampValue(isAdd ? dt.plusDays(days) : dt.minusDays(days)); } /** * Adds or subtracts time to/from date and returns the result. * * @param functionProperties A FunctionProperties object. - * @param temporal A Date/Time/Datetime/Timestamp value to change. - * @param temporalDelta A Date/Time/Datetime/Timestamp object to add/subtract time from. + * @param temporal A Date/Time/Timestamp value to change. + * @param temporalDelta A Date/Time/Timestamp object to add/subtract time from. * @param isAdd A flag: true to add, false to subtract. * @return A value calculated. */ @@ -1399,19 +1244,19 @@ private ExprValue exprApplyTime( var interval = Duration.between(LocalTime.MIN, temporalDelta.timeValue()); var result = isAdd - ? extractDateTime(temporal, functionProperties).plus(interval) - : extractDateTime(temporal, functionProperties).minus(interval); + ? extractTimestamp(temporal, functionProperties).plus(interval) + : extractTimestamp(temporal, functionProperties).minus(interval); return temporal.type() == TIME - ? new ExprTimeValue(result.toLocalTime()) - : new ExprDatetimeValue(result); + ? new ExprTimeValue(result.atZone(ZoneOffset.UTC).toLocalTime()) + : new ExprTimestampValue(result); } /** * Adds time to date and returns the result. * * @param functionProperties A FunctionProperties object. - * @param temporal A Date/Time/Datetime/Timestamp value to change. - * @param temporalDelta A Date/Time/Datetime/Timestamp object to add time from. + * @param temporal A Date/Time/Timestamp value to change. + * @param temporalDelta A Date/Time/Timestamp object to add time from. * @return A value calculated. 
*/ private ExprValue exprAddTime( @@ -1423,10 +1268,10 @@ private ExprValue exprAddTime( * CONVERT_TZ function implementation for ExprValue. Returns null for time zones outside of +13:00 * and -12:00. * - * @param startingDateTime ExprValue of DateTime that is being converted from + * @param startingDateTime ExprValue of Timestamp that is being converted from * @param fromTz ExprValue of time zone, representing the time to convert from. * @param toTz ExprValue of time zone, representing the time to convert to. - * @return DateTime that has been converted to the to_tz timezone. + * @return Timestamp that has been converted to the to_tz timezone. */ private ExprValue exprConvertTZ(ExprValue startingDateTime, ExprValue fromTz, ExprValue toTz) { if (startingDateTime.type() == ExprCoreType.STRING) { @@ -1442,8 +1287,10 @@ private ExprValue exprConvertTZ(ExprValue startingDateTime, ExprValue fromTz, Ex || !DateTimeUtils.isValidMySqlTimeZoneId(convertedToTz)) { return ExprNullValue.of(); } - ZonedDateTime zonedDateTime = startingDateTime.datetimeValue().atZone(convertedFromTz); - return new ExprDatetimeValue( + ZonedDateTime zonedDateTime = + (startingDateTime.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()) + .atZone(convertedFromTz); + return new ExprTimestampValue( zonedDateTime.withZoneSameInstant(convertedToTz).toLocalDateTime()); // Catches exception for invalid timezones. @@ -1484,43 +1331,43 @@ private ExprValue exprDateDiff( } /** - * DateTime implementation for ExprValue. + * Timestamp implementation for ExprValue. * - * @param dateTime ExprValue of String type. + * @param timestamp ExprValue of String type. * @param timeZone ExprValue of String type (or null). * @return ExprValue of date type. 
*/ - private ExprValue exprDateTime(ExprValue dateTime, ExprValue timeZone) { + private ExprValue exprDateTime(ExprValue timestamp, ExprValue timeZone) { String defaultTimeZone = TimeZone.getDefault().getID(); try { LocalDateTime ldtFormatted = - LocalDateTime.parse(dateTime.stringValue(), DATE_TIME_FORMATTER_STRICT_WITH_TZ); + LocalDateTime.parse(timestamp.stringValue(), DATE_TIME_FORMATTER_STRICT_WITH_TZ); if (timeZone.isNull()) { - return new ExprDatetimeValue(ldtFormatted); + return new ExprTimestampValue(ldtFormatted); } - // Used if datetime field is invalid format. + // Used if timestamp field is invalid format. } catch (DateTimeParseException e) { return ExprNullValue.of(); } ExprValue convertTZResult; - ExprDatetimeValue ldt; + ExprTimestampValue tz; String toTz; try { ZonedDateTime zdtWithZoneOffset = - ZonedDateTime.parse(dateTime.stringValue(), DATE_TIME_FORMATTER_STRICT_WITH_TZ); + ZonedDateTime.parse(timestamp.stringValue(), DATE_TIME_FORMATTER_STRICT_WITH_TZ); ZoneId fromTZ = zdtWithZoneOffset.getZone(); - ldt = new ExprDatetimeValue(zdtWithZoneOffset.toLocalDateTime()); + tz = new ExprTimestampValue(zdtWithZoneOffset.toLocalDateTime()); toTz = String.valueOf(fromTZ); } catch (DateTimeParseException e) { - ldt = new ExprDatetimeValue(dateTime.stringValue()); + tz = new ExprTimestampValue(timestamp.stringValue()); toTz = defaultTimeZone; } - convertTZResult = exprConvertTZ(ldt, new ExprStringValue(toTz), timeZone); + convertTZResult = exprConvertTZ(tz, new ExprStringValue(toTz), timeZone); return convertTZResult; } @@ -1549,7 +1396,7 @@ private ExprValue exprDayName(ExprValue date) { /** * Day of Month implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/String/Time/Timestamp type. + * @param date ExprValue of Date/String/Time/Timestamp type. * @return ExprValue. 
*/ private ExprValue exprDayOfMonth(ExprValue date) { @@ -1559,7 +1406,7 @@ private ExprValue exprDayOfMonth(ExprValue date) { /** * Day of Week implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/String/Timstamp type. + * @param date ExprValue of Date/String/Timestamp type. * @return ExprValue. */ private ExprValue exprDayOfWeek(ExprValue date) { @@ -1577,15 +1424,15 @@ private ExprValue exprDayOfYear(ExprValue date) { } /** - * Obtains a formatted long value for a specified part and datetime for the 'extract' function. + * Obtains a formatted long value for a specified part and timestamp for the 'extract' function. * * @param part is an ExprValue which comes from a defined list of accepted values. - * @param datetime the date to be formatted as an ExprValue. + * @param timestamp the date to be formatted as an ExprValue. * @return is a LONG formatted according to the input arguments. */ - public ExprLongValue formatExtractFunction(ExprValue part, ExprValue datetime) { + public ExprLongValue formatExtractFunction(ExprValue part, ExprValue timestamp) { String partName = part.stringValue().toUpperCase(); - LocalDateTime arg = datetime.datetimeValue(); + LocalDateTime arg = timestamp.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(); String text = arg.format(DateTimeFormatter.ofPattern(extract_formats.get(partName), Locale.ENGLISH)); @@ -1596,11 +1443,11 @@ public ExprLongValue formatExtractFunction(ExprValue part, ExprValue datetime) { * Implements extract function. Returns a LONG formatted according to the 'part' argument. * * @param part Literal that determines the format of the outputted LONG. - * @param datetime The date/datetime to be formatted. + * @param timestamp The Date/Timestamp to be formatted. 
* @return A LONG */ - private ExprValue exprExtract(ExprValue part, ExprValue datetime) { - return formatExtractFunction(part, datetime); + private ExprValue exprExtract(ExprValue part, ExprValue timestamp) { + return formatExtractFunction(part, timestamp); } /** @@ -1613,7 +1460,7 @@ private ExprValue exprExtract(ExprValue part, ExprValue datetime) { private ExprValue exprExtractForTime( FunctionProperties functionProperties, ExprValue part, ExprValue time) { return formatExtractFunction( - part, new ExprDatetimeValue(extractDateTime(time, functionProperties))); + part, new ExprTimestampValue(extractTimestamp(time, functionProperties))); } /** @@ -1637,12 +1484,12 @@ private ExprValue exprFromUnixTime(ExprValue time) { if (MYSQL_MAX_TIMESTAMP <= time.doubleValue()) { return ExprNullValue.of(); } - return new ExprDatetimeValue(exprFromUnixTimeImpl(time)); + return new ExprTimestampValue(exprFromUnixTimeImpl(time)); } private LocalDateTime exprFromUnixTimeImpl(ExprValue time) { return LocalDateTime.ofInstant( - Instant.ofEpochSecond((long) Math.floor(time.doubleValue())), UTC_ZONE_ID) + Instant.ofEpochSecond((long) Math.floor(time.doubleValue())), ZoneOffset.UTC) .withNano((int) ((time.doubleValue() % 1) * 1E9)); } @@ -1694,11 +1541,11 @@ private LocalDate getLastDay(LocalDate today) { /** * Returns a DATE for the last day of the month of a given argument. * - * @param datetime A DATE/DATETIME/TIMESTAMP/STRING ExprValue. + * @param timestamp A DATE/TIMESTAMP/STRING ExprValue. * @return An DATE value corresponding to the last day of the month of the given argument. */ - private ExprValue exprLastDay(ExprValue datetime) { - return new ExprDateValue(getLastDay(datetime.dateValue())); + private ExprValue exprLastDay(ExprValue timestamp) { + return new ExprDateValue(getLastDay(timestamp.dateValue())); } /** @@ -1932,9 +1779,9 @@ private ExprValue exprSecond(ExprValue time) { * SUBDATE function implementation for ExprValue. 
* * @param functionProperties An FunctionProperties object. - * @param date ExprValue of Time/Date/Datetime/Timestamp type. + * @param date ExprValue of Time/Date/Timestamp type. * @param days ExprValue of Long type, representing the number of days to subtract. - * @return Date/Datetime resulted from days subtracted to date. + * @return Date/Timestamp resulted from days subtracted to date. */ private ExprValue exprSubDateDays( FunctionProperties functionProperties, ExprValue date, ExprValue days) { @@ -1945,9 +1792,9 @@ private ExprValue exprSubDateDays( * DATE_SUB function implementation for ExprValue. * * @param functionProperties An FunctionProperties object. - * @param datetime ExprValue of Time/Date/Datetime/Timestamp type. + * @param datetime ExprValue of Time/Date/Timestamp type. * @param expr ExprValue of Interval type, the temporal amount to subtract. - * @return Datetime resulted from expr subtracted to `datetime`. + * @return Timestamp resulted from expr subtracted to `timestamp`. */ private ExprValue exprSubDateInterval( FunctionProperties functionProperties, ExprValue datetime, ExprValue expr) { @@ -1957,8 +1804,8 @@ private ExprValue exprSubDateInterval( /** * Subtracts expr2 from expr1 and returns the result. * - * @param temporal A Date/Time/Datetime/Timestamp value to change. - * @param temporalDelta A Date/Time/Datetime/Timestamp to subtract time from. + * @param temporal A Date/Time/Timestamp value to change. + * @param temporalDelta A Date/Time/Timestamp to subtract time from. * @return A value calculated. 
*/ private ExprValue exprSubTime( @@ -2012,7 +1859,8 @@ private ExprValue exprTimestampAdd( ExprValue partExpr, ExprValue amountExpr, ExprValue datetimeExpr) { String part = partExpr.stringValue(); int amount = amountExpr.integerValue(); - LocalDateTime datetime = datetimeExpr.datetimeValue(); + LocalDateTime timestamp = + datetimeExpr.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(); ChronoUnit temporalUnit; switch (part) { @@ -2047,13 +1895,13 @@ private ExprValue exprTimestampAdd( default: return ExprNullValue.of(); } - return new ExprDatetimeValue(datetime.plus(amount, temporalUnit)); + return new ExprTimestampValue(timestamp.plus(amount, temporalUnit)); } private ExprValue exprTimestampAddForTimeType( Clock clock, ExprValue partExpr, ExprValue amountExpr, ExprValue timeExpr) { LocalDateTime datetime = LocalDateTime.of(formatNow(clock).toLocalDate(), timeExpr.timeValue()); - return exprTimestampAdd(partExpr, amountExpr, new ExprDatetimeValue(datetime)); + return exprTimestampAdd(partExpr, amountExpr, new ExprTimestampValue(datetime)); } private ExprValue getTimeDifference(String part, LocalDateTime startTime, LocalDateTime endTime) { @@ -2095,15 +1943,17 @@ private ExprValue getTimeDifference(String part, LocalDateTime startTime, LocalD private ExprValue exprTimestampDiff( ExprValue partExpr, ExprValue startTimeExpr, ExprValue endTimeExpr) { return getTimeDifference( - partExpr.stringValue(), startTimeExpr.datetimeValue(), endTimeExpr.datetimeValue()); + partExpr.stringValue(), + startTimeExpr.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(), + endTimeExpr.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } private ExprValue exprTimestampDiffForTimeType( FunctionProperties fp, ExprValue partExpr, ExprValue startTimeExpr, ExprValue endTimeExpr) { return getTimeDifference( partExpr.stringValue(), - extractDateTime(startTimeExpr, fp), - extractDateTime(endTimeExpr, fp)); + extractTimestamp(startTimeExpr, 
fp).atZone(ZoneOffset.UTC).toLocalDateTime(), + extractTimestamp(endTimeExpr, fp).atZone(ZoneOffset.UTC).toLocalDateTime()); } /** @@ -2134,8 +1984,9 @@ private ExprValue exprUtcTime(FunctionProperties functionProperties) { */ private ExprValue exprUtcTimeStamp(FunctionProperties functionProperties) { var zdt = - ZonedDateTime.now(functionProperties.getQueryStartClock()).withZoneSameInstant(UTC_ZONE_ID); - return new ExprDatetimeValue(zdt.toLocalDateTime()); + ZonedDateTime.now(functionProperties.getQueryStartClock()) + .withZoneSameInstant(ZoneOffset.UTC); + return new ExprTimestampValue(zdt.toLocalDateTime()); } /** @@ -2151,12 +2002,13 @@ private ExprValue exprToDays(ExprValue date) { /** * To_seconds implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/Timestamp/String type. + * @param date ExprValue of Date/Timestamp/String type. * @return ExprValue. */ private ExprValue exprToSeconds(ExprValue date) { return new ExprLongValue( - date.datetimeValue().toEpochSecond(ZoneOffset.UTC) + DAYS_0000_TO_1970 * SECONDS_PER_DAY); + date.timestampValue().atOffset(ZoneOffset.UTC).toEpochSecond() + + DAYS_0000_TO_1970 * SECONDS_PER_DAY); } /** @@ -2226,7 +2078,7 @@ private ExprValue exprToSecondsForIntType(ExprValue dateExpr) { /** * Week for date implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/Timestamp/String type. + * @param date ExprValue of Date/Timestamp/String type. * @param mode ExprValue of Integer type. */ private ExprValue exprWeek(ExprValue date, ExprValue mode) { @@ -2237,7 +2089,7 @@ private ExprValue exprWeek(ExprValue date, ExprValue mode) { /** * Weekday implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/String/Timstamp type. + * @param date ExprValue of Date/String/Timstamp type. * @return ExprValue. 
*/ private ExprValue exprWeekday(ExprValue date) { @@ -2270,9 +2122,6 @@ private Double unixTimeStampOfImpl(ExprValue value) { switch ((ExprCoreType) value.type()) { case DATE: return value.dateValue().toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + 0d; - case DATETIME: - return value.datetimeValue().toEpochSecond(ZoneOffset.UTC) - + value.datetimeValue().getNano() / 1E9; case TIMESTAMP: return value.timestampValue().getEpochSecond() + value.timestampValue().getNano() / 1E9; default: @@ -2323,7 +2172,7 @@ private Double unixTimeStampOfImpl(ExprValue value) { * Week for date implementation for ExprValue. When mode is not specified default value mode 0 is * used for default_week_format. * - * @param date ExprValue of Date/Datetime/Timestamp/String type. + * @param date ExprValue of Date/Timestamp/String type. * @return ExprValue. */ private ExprValue exprWeekWithoutMode(ExprValue date) { @@ -2363,7 +2212,7 @@ private ExprIntegerValue extractYearweek(LocalDate date, int mode) { /** * Yearweek for date implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/Time/Timestamp/String type. + * @param date ExprValue of Date/Time/Timestamp/String type. * @param mode ExprValue of Integer type. */ private ExprValue exprYearweek(ExprValue date, ExprValue mode) { @@ -2374,7 +2223,7 @@ private ExprValue exprYearweek(ExprValue date, ExprValue mode) { * Yearweek for date implementation for ExprValue. When mode is not specified default value mode 0 * is used. * - * @param date ExprValue of Date/Datetime/Time/Timestamp/String type. + * @param date ExprValue of Date/Time/Timestamp/String type. * @return ExprValue. 
*/ private ExprValue exprYearweekWithoutMode(ExprValue date) { diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java b/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java index 7c3565f69c..db4b29f3b9 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java @@ -8,7 +8,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -29,7 +28,6 @@ import org.opensearch.sql.data.model.ExprBooleanValue; import org.opensearch.sql.data.model.ExprByteValue; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprFloatValue; import org.opensearch.sql.data.model.ExprIntegerValue; @@ -58,7 +56,6 @@ public static void register(BuiltinFunctionRepository repository) { repository.register(castToDate()); repository.register(castToTime()); repository.register(castToTimestamp()); - repository.register(castToDatetime()); } private static DefaultFunctionResolver castToString() { @@ -66,8 +63,7 @@ private static DefaultFunctionResolver castToString() { BuiltinFunctionName.CAST_TO_STRING.getName(), Stream.concat( Arrays.asList( - BYTE, SHORT, INTEGER, LONG, FLOAT, DOUBLE, BOOLEAN, TIME, DATE, TIMESTAMP, - DATETIME) + BYTE, SHORT, INTEGER, LONG, FLOAT, DOUBLE, BOOLEAN, TIME, DATE, TIMESTAMP) .stream() .map( type -> @@ -180,7 +176,6 @@ 
private static DefaultFunctionResolver castToDate() { return FunctionDSL.define( BuiltinFunctionName.CAST_TO_DATE.getName(), impl(nullMissingHandling((v) -> new ExprDateValue(v.stringValue())), DATE, STRING), - impl(nullMissingHandling((v) -> new ExprDateValue(v.dateValue())), DATE, DATETIME), impl(nullMissingHandling((v) -> new ExprDateValue(v.dateValue())), DATE, TIMESTAMP), impl(nullMissingHandling((v) -> v), DATE, DATE)); } @@ -189,21 +184,16 @@ private static DefaultFunctionResolver castToTime() { return FunctionDSL.define( BuiltinFunctionName.CAST_TO_TIME.getName(), impl(nullMissingHandling((v) -> new ExprTimeValue(v.stringValue())), TIME, STRING), - impl(nullMissingHandling((v) -> new ExprTimeValue(v.timeValue())), TIME, DATETIME), impl(nullMissingHandling((v) -> new ExprTimeValue(v.timeValue())), TIME, TIMESTAMP), impl(nullMissingHandling((v) -> v), TIME, TIME)); } - // `DATE`/`TIME`/`DATETIME` -> `DATETIME`/TIMESTAMP` cast tested in BinaryPredicateOperatorTest + // `DATE`/`TIME` -> `TIMESTAMP` cast tested in BinaryPredicateOperatorTest private static DefaultFunctionResolver castToTimestamp() { return FunctionDSL.define( BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), impl( nullMissingHandling((v) -> new ExprTimestampValue(v.stringValue())), TIMESTAMP, STRING), - impl( - nullMissingHandling((v) -> new ExprTimestampValue(v.timestampValue())), - TIMESTAMP, - DATETIME), impl( nullMissingHandling((v) -> new ExprTimestampValue(v.timestampValue())), TIMESTAMP, @@ -215,21 +205,4 @@ private static DefaultFunctionResolver castToTimestamp() { TIME), impl(nullMissingHandling((v) -> v), TIMESTAMP, TIMESTAMP)); } - - private static DefaultFunctionResolver castToDatetime() { - return FunctionDSL.define( - BuiltinFunctionName.CAST_TO_DATETIME.getName(), - impl(nullMissingHandling((v) -> new ExprDatetimeValue(v.stringValue())), DATETIME, STRING), - impl( - nullMissingHandling((v) -> new ExprDatetimeValue(v.datetimeValue())), - DATETIME, - TIMESTAMP), - 
impl(nullMissingHandling((v) -> new ExprDatetimeValue(v.datetimeValue())), DATETIME, DATE), - implWithProperties( - nullMissingHandlingWithProperties( - (fp, v) -> new ExprDatetimeValue(((ExprTimeValue) v).datetimeValue(fp))), - DATETIME, - TIME), - impl(nullMissingHandling((v) -> v), DATETIME, DATETIME)); - } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java index 81a1a0230f..82c8af52cd 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java @@ -6,17 +6,15 @@ package org.opensearch.sql.planner.physical.collector; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.LONG; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.Instant; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.temporal.ChronoField; import java.util.Arrays; import java.util.concurrent.TimeUnit; @@ -24,7 +22,6 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; @@ -49,9 +46,6 @@ public static Rounding createRounding(SpanExpression span) { if (DOUBLE.isCompatible(type)) { return new DoubleRounding(interval); } - if (type.equals(DATETIME)) { - return 
new DatetimeRounding(interval, span.getUnit().getName()); - } if (type.equals(TIMESTAMP)) { return new TimestampRounding(interval, span.getUnit().getName()); } @@ -84,26 +78,6 @@ public ExprValue round(ExprValue var) { } } - static class DatetimeRounding extends Rounding { - private final ExprValue interval; - private final DateTimeUnit dateTimeUnit; - - public DatetimeRounding(ExprValue interval, String unit) { - this.interval = interval; - this.dateTimeUnit = DateTimeUnit.resolve(unit); - } - - @Override - public ExprValue round(ExprValue var) { - Instant instant = - Instant.ofEpochMilli( - dateTimeUnit.round( - var.datetimeValue().atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), - interval.integerValue())); - return new ExprDatetimeValue(instant.atZone(UTC_ZONE_ID).toLocalDateTime()); - } - } - static class DateRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -118,9 +92,9 @@ public ExprValue round(ExprValue var) { Instant instant = Instant.ofEpochMilli( dateTimeUnit.round( - var.dateValue().atStartOfDay().atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), + var.dateValue().atStartOfDay().atZone(ZoneOffset.UTC).toInstant().toEpochMilli(), interval.integerValue())); - return new ExprDateValue(instant.atZone(UTC_ZONE_ID).toLocalDate()); + return new ExprDateValue(instant.atZone(ZoneOffset.UTC).toLocalDate()); } } @@ -144,7 +118,7 @@ public ExprValue round(ExprValue var) { Instant.ofEpochMilli( dateTimeUnit.round( var.timeValue().getLong(ChronoField.MILLI_OF_DAY), interval.integerValue())); - return new ExprTimeValue(instant.atZone(UTC_ZONE_ID).toLocalTime()); + return new ExprTimeValue(instant.atZone(ZoneOffset.UTC).toLocalTime()); } } diff --git a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java index 593b4c4471..62d5f0246d 100644 --- a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java +++ 
b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java @@ -9,6 +9,7 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.ZoneId; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import lombok.experimental.UtilityClass; import org.opensearch.sql.data.model.ExprTimeValue; @@ -48,9 +49,9 @@ public static long roundWeek(long utcMillis, int interval) { * @return Rounded date/time value in utc millis */ public static long roundMonth(long utcMillis, int interval) { - ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); + ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); ZonedDateTime zonedDateTime = - Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID).plusMonths(interval); + Instant.ofEpochMilli(utcMillis).atZone(ZoneOffset.UTC).plusMonths(interval); long monthDiff = (zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime.getMonthValue() @@ -67,9 +68,9 @@ public static long roundMonth(long utcMillis, int interval) { * @return Rounded date/time value in utc millis */ public static long roundQuarter(long utcMillis, int interval) { - ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); + ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); ZonedDateTime zonedDateTime = - Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID).plusMonths(interval * 3L); + Instant.ofEpochMilli(utcMillis).atZone(ZoneOffset.UTC).plusMonths(interval * 3L); long monthDiff = ((zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime.getMonthValue() @@ -86,8 +87,8 @@ public static long roundQuarter(long utcMillis, int interval) { * @return Rounded date/time value in utc millis */ public static long roundYear(long utcMillis, int interval) { - ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); - ZonedDateTime zonedDateTime = 
Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID); + ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(ZoneOffset.UTC); int yearDiff = zonedDateTime.getYear() - initDateTime.getYear(); int yearToAdd = (yearDiff / interval) * interval; return initDateTime.plusYears(yearToAdd).toInstant().toEpochMilli(); @@ -136,11 +137,10 @@ public Boolean isValidMySqlTimeZoneId(ZoneId zone) { * Extracts LocalDateTime from a datetime ExprValue. Uses `FunctionProperties` for * `ExprTimeValue`. */ - public static LocalDateTime extractDateTime( - ExprValue value, FunctionProperties functionProperties) { + public static Instant extractTimestamp(ExprValue value, FunctionProperties functionProperties) { return value instanceof ExprTimeValue - ? ((ExprTimeValue) value).datetimeValue(functionProperties) - : value.datetimeValue(); + ? ((ExprTimeValue) value).timestampValue(functionProperties) + : value.timestampValue(); } /** @@ -151,6 +151,4 @@ public static LocalDate extractDate(ExprValue value, FunctionProperties function ? 
((ExprTimeValue) value).dateValue(functionProperties) : value.dateValue(); } - - public static final ZoneId UTC_ZONE_ID = ZoneId.of("UTC"); } diff --git a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java index 2f4d6e8ada..8d935b11d2 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java @@ -157,7 +157,7 @@ public void filter_relation_with_invalid_qualifiedName_ExpressionEvaluationExcep assertEquals( "= function expected {[BYTE,BYTE],[SHORT,SHORT],[INTEGER,INTEGER],[LONG,LONG]," + "[FLOAT,FLOAT],[DOUBLE,DOUBLE],[STRING,STRING],[BOOLEAN,BOOLEAN],[DATE,DATE]," - + "[TIME,TIME],[DATETIME,DATETIME],[TIMESTAMP,TIMESTAMP],[INTERVAL,INTERVAL]," + + "[TIME,TIME],[TIMESTAMP,TIMESTAMP],[INTERVAL,INTERVAL]," + "[STRUCT,STRUCT],[ARRAY,ARRAY]}, but get [STRING,INTEGER]", exception.getMessage()); } diff --git a/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java index 01fe4a5e4e..b5a3d61211 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java @@ -10,11 +10,11 @@ import static org.opensearch.sql.data.model.ExprValueUtils.integerValue; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import org.junit.jupiter.api.Test; import org.opensearch.sql.exception.ExpressionEvaluationException; @@ -36,8 +36,6 @@ public void timeValueInterfaceTest() { // without a FunctionProperties object var exception = 
assertThrows(ExpressionEvaluationException.class, timeValue::dateValue); assertEquals("invalid to get dateValue from value of type TIME", exception.getMessage()); - exception = assertThrows(ExpressionEvaluationException.class, timeValue::datetimeValue); - assertEquals("invalid to get datetimeValue from value of type TIME", exception.getMessage()); exception = assertThrows(ExpressionEvaluationException.class, timeValue::timestampValue); assertEquals("invalid to get timestampValue from value of type TIME", exception.getMessage()); @@ -45,9 +43,11 @@ public void timeValueInterfaceTest() { var today = LocalDate.now(functionProperties.getQueryStartClock()); assertEquals(today, timeValue.dateValue(functionProperties)); - assertEquals(today.atTime(1, 1, 1), timeValue.datetimeValue(functionProperties)); assertEquals( - ZonedDateTime.of(LocalTime.parse("01:01:01").atDate(today), UTC_ZONE_ID).toInstant(), + today.atTime(1, 1, 1), + LocalDateTime.ofInstant(timeValue.timestampValue(functionProperties), ZoneOffset.UTC)); + assertEquals( + ZonedDateTime.of(LocalTime.parse("01:01:01").atDate(today), ZoneOffset.UTC).toInstant(), timeValue.timestampValue(functionProperties)); assertEquals("01:01:01", timeValue.value()); @@ -63,13 +63,15 @@ public void timestampValueInterfaceTest() { assertEquals(TIMESTAMP, timestampValue.type()); assertEquals( - ZonedDateTime.of(LocalDateTime.parse("2020-07-07T01:01:01"), UTC_ZONE_ID).toInstant(), + ZonedDateTime.of(LocalDateTime.parse("2020-07-07T01:01:01"), ZoneOffset.UTC).toInstant(), timestampValue.timestampValue()); assertEquals("2020-07-07 01:01:01", timestampValue.value()); assertEquals("TIMESTAMP '2020-07-07 01:01:01'", timestampValue.toString()); assertEquals(LocalDate.parse("2020-07-07"), timestampValue.dateValue()); assertEquals(LocalTime.parse("01:01:01"), timestampValue.timeValue()); - assertEquals(LocalDateTime.parse("2020-07-07T01:01:01"), timestampValue.datetimeValue()); + assertEquals( + LocalDateTime.parse("2020-07-07T01:01:01"), 
+ LocalDateTime.ofInstant(timestampValue.timestampValue(), ZoneOffset.UTC)); assertThrows( ExpressionEvaluationException.class, () -> integerValue(1).timestampValue(), @@ -82,32 +84,17 @@ public void dateValueInterfaceTest() { assertEquals(LocalDate.parse("2012-07-07"), dateValue.dateValue()); assertEquals(LocalTime.parse("00:00:00"), dateValue.timeValue()); - assertEquals(LocalDateTime.parse("2012-07-07T00:00:00"), dateValue.datetimeValue()); assertEquals( - ZonedDateTime.of(LocalDateTime.parse("2012-07-07T00:00:00"), UTC_ZONE_ID).toInstant(), + LocalDateTime.parse("2012-07-07T00:00:00"), + LocalDateTime.ofInstant(dateValue.timestampValue(), ZoneOffset.UTC)); + assertEquals( + ZonedDateTime.of(LocalDateTime.parse("2012-07-07T00:00:00"), ZoneOffset.UTC).toInstant(), dateValue.timestampValue()); ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, () -> integerValue(1).dateValue()); assertEquals("invalid to get dateValue from value of type INTEGER", exception.getMessage()); } - @Test - public void datetimeValueInterfaceTest() { - ExprValue datetimeValue = new ExprDatetimeValue("2020-08-17 19:44:00"); - - assertEquals(LocalDateTime.parse("2020-08-17T19:44:00"), datetimeValue.datetimeValue()); - assertEquals(LocalDate.parse("2020-08-17"), datetimeValue.dateValue()); - assertEquals(LocalTime.parse("19:44:00"), datetimeValue.timeValue()); - assertEquals( - ZonedDateTime.of(LocalDateTime.parse("2020-08-17T19:44:00"), UTC_ZONE_ID).toInstant(), - datetimeValue.timestampValue()); - assertEquals("DATETIME '2020-08-17 19:44:00'", datetimeValue.toString()); - assertThrows( - ExpressionEvaluationException.class, - () -> integerValue(1).datetimeValue(), - "invalid to get datetimeValue from value of type INTEGER"); - } - @Test public void dateInUnsupportedFormat() { SemanticCheckException exception = @@ -137,21 +124,12 @@ public void timestampInUnsupportedFormat() { } @Test - public void datetimeInUnsupportedFormat() { - 
SemanticCheckException exception = - assertThrows( - SemanticCheckException.class, () -> new ExprDatetimeValue("2020-07-07T01:01:01Z")); - assertEquals( - "datetime:2020-07-07T01:01:01Z in unsupported format, " - + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", - exception.getMessage()); - } - - @Test - public void stringDateTimeValue() { + public void stringTimestampValue() { ExprValue stringValue = new ExprStringValue("2020-08-17 19:44:00"); - assertEquals(LocalDateTime.parse("2020-08-17T19:44:00"), stringValue.datetimeValue()); + assertEquals( + LocalDateTime.parse("2020-08-17T19:44:00").atZone(ZoneOffset.UTC).toInstant(), + stringValue.timestampValue()); assertEquals(LocalDate.parse("2020-08-17"), stringValue.dateValue()); assertEquals(LocalTime.parse("19:44:00"), stringValue.timeValue()); assertEquals("\"2020-08-17 19:44:00\"", stringValue.toString()); @@ -159,10 +137,9 @@ public void stringDateTimeValue() { SemanticCheckException exception = assertThrows( SemanticCheckException.class, - () -> new ExprStringValue("2020-07-07T01:01:01Z").datetimeValue()); + () -> new ExprStringValue("2020-07-07T01:01:01Z").timestampValue()); assertEquals( - "datetime:2020-07-07T01:01:01Z in unsupported format, " - + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", + "date:2020-07-07T01:01:01Z in unsupported format, " + "please use 'yyyy-MM-dd'", exception.getMessage()); } @@ -170,7 +147,8 @@ public void stringDateTimeValue() { public void stringDateValue() { ExprValue stringValue = new ExprStringValue("2020-08-17"); - assertEquals(LocalDateTime.parse("2020-08-17T00:00:00"), stringValue.datetimeValue()); + assertEquals( + ZonedDateTime.parse("2020-08-17T00:00:00Z").toInstant(), stringValue.timestampValue()); assertEquals(LocalDate.parse("2020-08-17"), stringValue.dateValue()); assertEquals("\"2020-08-17\"", stringValue.toString()); @@ -228,28 +206,9 @@ public void timestampWithVariableNanoPrecision() { assertEquals(LocalDate.parse(dateValue), timestampValue.dateValue()); 
assertEquals(LocalTime.parse(timeWithNanos), timestampValue.timeValue()); String localDateTime = String.format("%sT%s", dateValue, timeWithNanos); - assertEquals(LocalDateTime.parse(localDateTime), timestampValue.datetimeValue()); - } - } - - @Test - public void datetimeWithVariableNanoPrecision() { - String dateValue = "2020-08-17"; - String timeWithNanosFormat = "10:11:12.%s"; - - // Check all lengths of nanosecond precision, up to max precision accepted - StringBuilder nanos = new StringBuilder(); - for (int nanoPrecision = 1; nanoPrecision <= NANOS_PRECISION_MAX; nanoPrecision++) { - nanos.append(nanoPrecision); - String timeWithNanos = String.format(timeWithNanosFormat, nanos); - - String datetimeString = String.format("%s %s", dateValue, timeWithNanos); - ExprValue datetimeValue = new ExprDatetimeValue(datetimeString); - - assertEquals(LocalDate.parse(dateValue), datetimeValue.dateValue()); - assertEquals(LocalTime.parse(timeWithNanos), datetimeValue.timeValue()); - String localDateTime = String.format("%sT%s", dateValue, timeWithNanos); - assertEquals(LocalDateTime.parse(localDateTime), datetimeValue.datetimeValue()); + assertEquals( + LocalDateTime.parse(localDateTime), + LocalDateTime.ofInstant(timestampValue.timestampValue(), ZoneOffset.UTC)); } } @@ -265,18 +224,6 @@ public void timestampOverMaxNanoPrecision() { exception.getMessage()); } - @Test - public void datetimeOverMaxNanoPrecision() { - SemanticCheckException exception = - assertThrows( - SemanticCheckException.class, - () -> new ExprDatetimeValue("2020-07-07 01:01:01.1234567890")); - assertEquals( - "datetime:2020-07-07 01:01:01.1234567890 in unsupported format, " - + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", - exception.getMessage()); - } - @Test public void timeOverMaxNanoPrecision() { SemanticCheckException exception = diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java index 
b965dff643..ee30a0f0c6 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java @@ -11,7 +11,7 @@ import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_NULL; -import static org.opensearch.sql.utils.DateTimeUtils.extractDateTime; +import static org.opensearch.sql.utils.DateTimeUtils.extractTimestamp; import java.time.LocalDate; import java.time.Period; @@ -39,22 +39,6 @@ public void dateValueCompare() { assertEquals(-1, new ExprDateValue("2012-08-07").compareTo(new ExprDateValue("2012-08-08"))); } - @Test - public void datetimeValueCompare() { - assertEquals( - 0, - new ExprDatetimeValue("2012-08-07 18:00:00") - .compareTo(new ExprDatetimeValue("2012-08-07 18:00:00"))); - assertEquals( - 1, - new ExprDatetimeValue("2012-08-07 19:00:00") - .compareTo(new ExprDatetimeValue("2012-08-07 18:00:00"))); - assertEquals( - -1, - new ExprDatetimeValue("2012-08-07 18:00:00") - .compareTo(new ExprDatetimeValue("2012-08-07 19:00:00"))); - } - @Test public void timestampValueCompare() { assertEquals( @@ -73,26 +57,14 @@ public void timestampValueCompare() { private static Stream getEqualDatetimeValuesOfDifferentTypes() { return Stream.of( - Arguments.of( - new ExprTimestampValue("1961-04-12 09:07:00"), - new ExprDatetimeValue("1961-04-12 09:07:00")), Arguments.of( new ExprTimestampValue("1984-11-22 00:00:00"), new ExprDateValue("1984-11-22")), Arguments.of( new ExprTimestampValue(LocalDate.now() + " 00:00:00"), new ExprDateValue(LocalDate.now())), - Arguments.of( - new ExprDatetimeValue(LocalDate.now() + " 17:42:15"), new ExprTimeValue("17:42:15")), - Arguments.of( - new ExprDatetimeValue("2012-08-07 19:14:38"), - new ExprTimestampValue("2012-08-07 19:14:38")), - Arguments.of(new ExprDateValue("2012-08-07"), 
new ExprDatetimeValue("2012-08-07 00:00:00")), - Arguments.of(new ExprDateValue("2007-01-27"), new ExprDatetimeValue("2007-01-27 00:00:00")), Arguments.of(new ExprDateValue(LocalDate.now()), new ExprTimeValue("00:00:00")), Arguments.of( new ExprTimestampValue("1984-11-22 00:00:00"), new ExprDateValue("1984-11-22")), - Arguments.of( - new ExprTimeValue("19:14:38"), new ExprDatetimeValue(LocalDate.now() + " 19:14:38")), Arguments.of( new ExprTimeValue("17:42:15"), new ExprTimestampValue(LocalDate.now() + " 17:42:15"))); } @@ -106,34 +78,17 @@ private static Stream getEqualDatetimeValuesOfDifferentTypes() { public void compareEqDifferentDateTimeValueTypes(ExprValue left, ExprValue right) { assertEquals( 0, - extractDateTime(left, functionProperties) - .compareTo(extractDateTime(right, functionProperties))); - assertEquals( - 0, - extractDateTime(right, functionProperties) - .compareTo(extractDateTime(left, functionProperties))); + extractTimestamp(left, functionProperties) + .compareTo(extractTimestamp(right, functionProperties))); } private static Stream getNotEqualDatetimeValuesOfDifferentTypes() { return Stream.of( - Arguments.of( - new ExprDatetimeValue("2012-08-07 19:14:38"), - new ExprTimestampValue("1961-04-12 09:07:00")), - Arguments.of(new ExprDatetimeValue("2012-08-07 19:14:38"), new ExprTimeValue("09:07:00")), - Arguments.of( - new ExprDatetimeValue(LocalDate.now() + " 19:14:38"), new ExprTimeValue("09:07:00")), - Arguments.of(new ExprDatetimeValue("2012-08-07 00:00:00"), new ExprDateValue("1961-04-12")), - Arguments.of(new ExprDatetimeValue("1961-04-12 19:14:38"), new ExprDateValue("1961-04-12")), - Arguments.of(new ExprDateValue("1984-11-22"), new ExprDatetimeValue("1961-04-12 19:14:38")), Arguments.of( new ExprDateValue("1984-11-22"), new ExprTimestampValue("2020-09-16 17:30:00")), Arguments.of(new ExprDateValue("1984-11-22"), new ExprTimeValue("19:14:38")), Arguments.of(new ExprTimeValue("19:14:38"), new ExprDateValue(LocalDate.now())), - 
Arguments.of(new ExprTimeValue("19:14:38"), new ExprDatetimeValue("2012-08-07 09:07:00")), Arguments.of(new ExprTimeValue("19:14:38"), new ExprTimestampValue("1984-02-03 04:05:07")), - Arguments.of( - new ExprTimestampValue("2012-08-07 19:14:38"), - new ExprDatetimeValue("1961-04-12 09:07:00")), Arguments.of(new ExprTimestampValue("2012-08-07 19:14:38"), new ExprTimeValue("09:07:00")), Arguments.of( new ExprTimestampValue(LocalDate.now() + " 19:14:38"), new ExprTimeValue("09:07:00")), @@ -152,12 +107,8 @@ private static Stream getNotEqualDatetimeValuesOfDifferentTypes() { public void compareNeqDifferentDateTimeValueTypes(ExprValue left, ExprValue right) { assertNotEquals( 0, - extractDateTime(left, functionProperties) - .compareTo(extractDateTime(right, functionProperties))); - assertNotEquals( - 0, - extractDateTime(right, functionProperties) - .compareTo(extractDateTime(left, functionProperties))); + extractTimestamp(left, functionProperties) + .compareTo(extractTimestamp(right, functionProperties))); } @Test diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java index c879384955..0baf5052e4 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java @@ -13,13 +13,11 @@ import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.INTERVAL; import static org.opensearch.sql.data.type.ExprCoreType.STRING; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; -import static 
org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -29,6 +27,7 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.AbstractMap; import java.util.ArrayList; @@ -70,7 +69,6 @@ public class ExprValueUtilsTest { new ExprTupleValue(testTuple), new ExprDateValue("2012-08-07"), new ExprTimeValue("18:00:00"), - new ExprDatetimeValue("2012-08-07 18:00:00"), new ExprTimestampValue("2012-08-07 18:00:00"), new ExprIntervalValue(Duration.ofSeconds(100))); @@ -95,7 +93,6 @@ public class ExprValueUtilsTest { Arrays.asList( ExprValue::dateValue, ExprValue::timeValue, - ExprValue::datetimeValue, ExprValue::timestampValue, ExprValue::intervalValue); private static List> allValueExtractor = @@ -113,7 +110,7 @@ public class ExprValueUtilsTest { ExprCoreType.DOUBLE); private static List nonNumberTypes = Arrays.asList(STRING, BOOLEAN, ARRAY, STRUCT); private static List dateAndTimeTypes = - Arrays.asList(DATE, TIME, DATETIME, TIMESTAMP, INTERVAL); + Arrays.asList(DATE, TIME, TIMESTAMP, INTERVAL); private static List allTypes = Lists.newArrayList(Iterables.concat(numberTypes, nonNumberTypes, dateAndTimeTypes)); @@ -132,8 +129,8 @@ private static Stream getValueTestArgumentStream() { ImmutableMap.of("1", integerValue(1)), LocalDate.parse("2012-08-07"), LocalTime.parse("18:00:00"), - LocalDateTime.parse("2012-08-07T18:00:00"), - ZonedDateTime.of(LocalDateTime.parse("2012-08-07T18:00:00"), UTC_ZONE_ID).toInstant(), + ZonedDateTime.of(LocalDateTime.parse("2012-08-07T18:00:00"), ZoneOffset.UTC) + .toInstant(), Duration.ofSeconds(100)); Stream.Builder builder = Stream.builder(); for (int i = 0; i < expectedValues.size(); i++) { @@ -237,9 +234,6 @@ public void constructDateAndTimeValue() { assertEquals( new ExprDateValue("2012-07-07"), ExprValueUtils.fromObjectValue("2012-07-07", 
DATE)); assertEquals(new ExprTimeValue("01:01:01"), ExprValueUtils.fromObjectValue("01:01:01", TIME)); - assertEquals( - new ExprDatetimeValue("2012-07-07 01:01:01"), - ExprValueUtils.fromObjectValue("2012-07-07 01:01:01", DATETIME)); assertEquals( new ExprTimestampValue("2012-07-07 01:01:01"), ExprValueUtils.fromObjectValue("2012-07-07 01:01:01", TIMESTAMP)); @@ -260,9 +254,6 @@ public void hashCodeTest() { new ExprDateValue("2012-08-07").hashCode(), new ExprDateValue("2012-08-07").hashCode()); assertEquals( new ExprTimeValue("18:00:00").hashCode(), new ExprTimeValue("18:00:00").hashCode()); - assertEquals( - new ExprDatetimeValue("2012-08-07 18:00:00").hashCode(), - new ExprDatetimeValue("2012-08-07 18:00:00").hashCode()); assertEquals( new ExprTimestampValue("2012-08-07 18:00:00").hashCode(), new ExprTimestampValue("2012-08-07 18:00:00").hashCode()); diff --git a/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java b/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java index 1def15cc6f..ec45c3dfec 100644 --- a/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java +++ b/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java @@ -12,7 +12,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -45,7 +44,6 @@ public void isCompatible() { assertTrue(TIMESTAMP.isCompatible(STRING)); assertTrue(DATE.isCompatible(STRING)); assertTrue(TIME.isCompatible(STRING)); - assertTrue(DATETIME.isCompatible(STRING)); } @Test diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java 
b/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java index f465a6477e..10551d43a5 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java @@ -9,7 +9,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -19,6 +18,7 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.util.List; import org.junit.jupiter.api.Test; import org.opensearch.sql.data.model.ExprValue; @@ -89,12 +89,6 @@ public void avg_date_no_values() { assertTrue(result.isNull()); } - @Test - public void avg_datetime_no_values() { - ExprValue result = aggregation(DSL.avg(DSL.ref("dummy", DATETIME)), List.of()); - assertTrue(result.isNull()); - } - @Test public void avg_timestamp_no_values() { ExprValue result = aggregation(DSL.avg(DSL.ref("dummy", TIMESTAMP)), List.of()); @@ -113,12 +107,6 @@ public void avg_date() { assertEquals(LocalDate.of(2007, 7, 2), result.dateValue()); } - @Test - public void avg_datetime() { - var result = aggregation(DSL.avg(DSL.datetime(DSL.ref("datetime_value", STRING))), tuples); - assertEquals(LocalDateTime.of(2012, 7, 2, 3, 30), result.datetimeValue()); - } - @Test public void avg_time() { ExprValue result = aggregation(DSL.avg(DSL.time(DSL.ref("time_value", STRING))), tuples); @@ -129,7 +117,9 @@ public void avg_time() { public void avg_timestamp() { var result = aggregation(DSL.avg(DSL.timestamp(DSL.ref("timestamp_value", STRING))), tuples); 
assertEquals(TIMESTAMP, result.type()); - assertEquals(LocalDateTime.of(2012, 7, 2, 3, 30), result.datetimeValue()); + assertEquals( + LocalDateTime.of(2012, 7, 2, 3, 30), + result.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java index 50bd3fedfe..2159780dc0 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java @@ -10,7 +10,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -63,12 +62,6 @@ public void count_timestamp_field_expression() { assertEquals(4, result.value()); } - @Test - public void count_datetime_field_expression() { - ExprValue result = aggregation(DSL.count(DSL.ref("datetime_value", DATETIME)), tuples); - assertEquals(4, result.value()); - } - @Test public void count_arithmetic_expression() { ExprValue result = diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java index c6cd380ad5..f952eff982 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java @@ -9,7 +9,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static 
org.junit.jupiter.api.Assertions.assertTrue; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -62,12 +61,6 @@ public void test_max_date() { assertEquals("2040-01-01", result.value()); } - @Test - public void test_max_datetime() { - ExprValue result = aggregation(DSL.max(DSL.ref("datetime_value", DATETIME)), tuples); - assertEquals("2040-01-01 07:00:00", result.value()); - } - @Test public void test_max_time() { ExprValue result = aggregation(DSL.max(DSL.ref("time_value", TIME)), tuples); diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java index 1aee0f3a6c..8a3f3d15a3 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java @@ -9,7 +9,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -62,12 +61,6 @@ public void test_min_date() { assertEquals("1970-01-01", result.value()); } - @Test - public void test_min_datetime() { - ExprValue result = aggregation(DSL.min(DSL.ref("datetime_value", DATETIME)), tuples); - assertEquals("1970-01-01 19:00:00", result.value()); - } - @Test public void test_min_time() { ExprValue result = 
aggregation(DSL.min(DSL.ref("time_value", TIME)), tuples); diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java index eed83f4fa9..49947976b4 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java @@ -6,13 +6,14 @@ package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.TIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.temporal.Temporal; import java.util.stream.Stream; import org.junit.jupiter.api.Test; @@ -23,7 +24,7 @@ public class AddTimeAndSubTimeTest extends DateTimeTestBase { @Test - // (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME + // (TIME, TIME/DATE/TIMESTAMP) -> TIME public void return_time_when_first_arg_is_time() { var res = addtime(LocalTime.of(21, 0), LocalTime.of(0, 5)); assertEquals(TIME, res.type()); @@ -70,31 +71,10 @@ public void time_limited_by_24_hours() { } // Function signature is: - // (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME + // (DATE/TIMESTAMP, TIME/DATE/TIMESTAMP) -> TIMESTAMP private static Stream getTestData() { return Stream.of( - // DATETIME and TIME/DATE/DATETIME/TIMESTAMP - Arguments.of( - LocalDateTime.of(1961, 4, 12, 9, 7), - LocalTime.of(1, 48), - LocalDateTime.of(1961, 4, 12, 10, 55), - LocalDateTime.of(1961, 4, 12, 7, 19)), - Arguments.of( - LocalDateTime.of(1961, 4, 12, 9, 7), - LocalDate.of(2000, 1, 1), - LocalDateTime.of(1961, 4, 12, 9, 7), - LocalDateTime.of(1961, 4, 12, 9, 7)), - 
Arguments.of( - LocalDateTime.of(1961, 4, 12, 9, 7), - LocalDateTime.of(1235, 5, 6, 1, 48), - LocalDateTime.of(1961, 4, 12, 10, 55), - LocalDateTime.of(1961, 4, 12, 7, 19)), - Arguments.of( - LocalDateTime.of(1961, 4, 12, 9, 7), - Instant.ofEpochSecond(42), - LocalDateTime.of(1961, 4, 12, 9, 7, 42), - LocalDateTime.of(1961, 4, 12, 9, 6, 18)), - // DATE and TIME/DATE/DATETIME/TIMESTAMP + // DATE and TIME/DATE/TIMESTAMP Arguments.of( LocalDate.of(1961, 4, 12), LocalTime.of(9, 7), @@ -115,7 +95,7 @@ private static Stream getTestData() { Instant.ofEpochSecond(42), LocalDateTime.of(1961, 4, 12, 0, 0, 42), LocalDateTime.of(1961, 4, 11, 23, 59, 18)), - // TIMESTAMP and TIME/DATE/DATETIME/TIMESTAMP + // TIMESTAMP and TIME/DATE/TIMESTAMP Arguments.of( Instant.ofEpochSecond(42), LocalTime.of(9, 7), @@ -154,11 +134,13 @@ public void return_datetime_when_first_arg_is_not_time( LocalDateTime addTimeExpectedResult, LocalDateTime subTimeExpectedResult) { var res = addtime(arg1, arg2); - assertEquals(DATETIME, res.type()); - assertEquals(addTimeExpectedResult, res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + addTimeExpectedResult, res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); res = subtime(arg1, arg2); - assertEquals(DATETIME, res.type()); - assertEquals(subTimeExpectedResult, res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + subTimeExpectedResult, res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java index 17ff4f67ab..707f995138 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java @@ -6,11 +6,13 @@ package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; 
+import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.sql.data.model.ExprValueUtils.nullValue; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import org.junit.jupiter.api.Test; -import org.opensearch.sql.data.model.ExprDatetimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; +import org.opensearch.sql.exception.SemanticCheckException; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.ExpressionTestBase; import org.opensearch.sql.expression.FunctionExpression; @@ -21,32 +23,32 @@ class ConvertTZTest extends ExpressionTestBase { public void invalidDate() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2021-04-31 10:00:00")), + DSL.timestamp(DSL.literal("2021-04-31 10:00:00")), DSL.literal("+00:00"), DSL.literal("+00:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(nullValue(), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertThrows(SemanticCheckException.class, expr::valueOf); } @Test public void conversionFromNoOffset() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:00"), DSL.literal("+10:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-16 08:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-16 08:00:00"), expr.valueOf()); } @Test public void conversionToInvalidInput3Over() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:00"), DSL.literal("+16:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -54,10 +56,10 @@ public void 
conversionToInvalidInput3Over() { public void conversionToInvalidInput3Under() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:00"), DSL.literal("-16:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -65,10 +67,10 @@ public void conversionToInvalidInput3Under() { public void conversionFromPositiveToPositive() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+15:00"), DSL.literal("+01:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -76,10 +78,10 @@ public void conversionFromPositiveToPositive() { public void invalidInput2Under() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("-15:00"), DSL.literal("+01:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -87,10 +89,10 @@ public void invalidInput2Under() { public void invalidInput3Over() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("-12:00"), DSL.literal("+15:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -98,32 +100,32 @@ public void invalidInput3Over() { public void conversionToPositiveEdge() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:00"), DSL.literal("+14:00")); - assertEquals(DATETIME, expr.type()); - 
assertEquals(new ExprDatetimeValue("2008-05-16 12:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-16 12:00:00"), expr.valueOf()); } @Test public void conversionToNegativeEdge() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:01"), DSL.literal("-13:59")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-15 08:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-15 08:00:00"), expr.valueOf()); } @Test public void invalidInput2() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+)()"), DSL.literal("+12:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -131,10 +133,10 @@ public void invalidInput2() { public void invalidInput3() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:00"), DSL.literal("test")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -142,7 +144,7 @@ public void invalidInput3() { public void invalidInput1() { FunctionExpression expr = DSL.convert_tz(DSL.literal("test"), DSL.literal("+00:00"), DSL.literal("+00:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -150,32 +152,32 @@ public void invalidInput1() { public void invalidDateFeb30() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2021-02-30 10:00:00")), + DSL.timestamp(DSL.literal("2021-02-30 10:00:00")), DSL.literal("+00:00"), 
DSL.literal("+00:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(nullValue(), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertThrows(SemanticCheckException.class, expr::valueOf); } @Test public void invalidDateApril31() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2021-04-31 10:00:00")), + DSL.timestamp(DSL.literal("2021-04-31 10:00:00")), DSL.literal("+00:00"), DSL.literal("+00:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(nullValue(), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertThrows(SemanticCheckException.class, expr::valueOf); } @Test public void invalidMonth13() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2021-13-03 10:00:00")), + DSL.timestamp(DSL.literal("2021-13-03 10:00:00")), DSL.literal("+00:00"), DSL.literal("+00:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(nullValue(), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertThrows(SemanticCheckException.class, expr::valueOf); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java index 52db0a17e5..519e97bdc6 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java @@ -8,7 +8,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import java.time.Duration; import java.time.Instant; @@ -29,81 +29,99 @@ private LocalDate today() { @Test public void adddate_returns_datetime_when_args_are_time_and_time_interval() { var res = 
adddate(LocalTime.MIN, Duration.ofHours(1).plusMinutes(2)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(1, 2).atDate(today()), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalTime.of(1, 2).atDate(today()), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void date_add_returns_datetime_when_args_are_time_and_time_interval() { var res = date_add(LocalTime.of(10, 20, 30), Duration.ofHours(1).plusMinutes(2).plusSeconds(42)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(11, 23, 12).atDate(today()), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalTime.of(11, 23, 12).atDate(today()), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_time_limited_by_24_hours() { var res = adddate(LocalTime.MAX, Duration.ofNanos(1)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.MIN, res.datetimeValue().toLocalTime()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalTime.MIN, res.timestampValue().atZone(ZoneOffset.UTC).toLocalTime()); } @Test public void date_add_time_limited_by_24_hours() { var res = date_add(LocalTime.of(10, 20, 30), Duration.ofHours(20).plusMinutes(50).plusSeconds(7)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(7, 10, 37), res.datetimeValue().toLocalTime()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalTime.of(7, 10, 37), res.timestampValue().atZone(ZoneOffset.UTC).toLocalTime()); } @Test public void adddate_returns_datetime_when_args_are_date_and_date_interval() { var res = adddate(LocalDate.of(2020, 2, 20), Period.of(3, 11, 21)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDate.of(2024, 2, 10).atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDate.of(2024, 2, 10).atStartOfDay(), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test 
public void date_add_returns_datetime_when_args_are_date_and_date_interval() { var res = date_add(LocalDate.of(1961, 4, 12), Period.of(50, 50, 50)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDate.of(2015, 8, 1).atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDate.of(2015, 8, 1).atStartOfDay(), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_returns_datetime_when_args_are_date_and_time_interval() { var res = adddate(LocalDate.of(2020, 2, 20), Duration.ofHours(1).plusMinutes(2)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 2, 20, 1, 2), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(2020, 2, 20, 1, 2), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void date_add_returns_datetime_when_args_are_date_and_time_interval() { var res = date_add(LocalDate.of(1961, 4, 12), Duration.ofHours(9).plusMinutes(7)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 9, 7), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(1961, 4, 12, 9, 7), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_returns_datetime_when_args_are_time_and_date_interval() { // Date based on today var res = adddate(LocalTime.of(1, 2, 0), Period.ofDays(1)); - assertEquals(DATETIME, res.type()); - assertEquals(today().plusDays(1).atTime(LocalTime.of(1, 2, 0)), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + today().plusDays(1).atTime(LocalTime.of(1, 2, 0)), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void date_add_returns_datetime_when_args_are_time_and_date_interval() { var res = date_add(LocalTime.MIDNIGHT, Period.ofDays(0)); - assertEquals(DATETIME, res.type()); - 
assertEquals(today().atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + today().atStartOfDay(), res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_returns_datetime_when_first_arg_is_datetime() { var res = adddate(LocalDateTime.of(1961, 4, 12, 9, 7), Duration.ofMinutes(108)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 10, 55), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(1961, 4, 12, 10, 55), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test @@ -111,30 +129,34 @@ public void date_add_returns_datetime_when_first_arg_is_timestamp() { var res = date_add( LocalDateTime.of(1961, 4, 12, 9, 7).toInstant(ZoneOffset.UTC), Duration.ofMinutes(108)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 10, 55), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(1961, 4, 12, 10, 55), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_accepts_negative_interval() { var res = adddate(LocalDateTime.of(2020, 10, 20, 14, 42), Duration.ofDays(-10)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 10, 10, 14, 42), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(2020, 10, 10, 14, 42), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); assertEquals(subdate(LocalDateTime.of(2020, 10, 20, 14, 42), Duration.ofDays(10)), res); } @Test public void adddate_has_second_signature_but_not_date_add() { var res = adddate(LocalDateTime.of(1961, 4, 12, 9, 7), 100500); - assertEquals(DATETIME, res.type()); + assertEquals(TIMESTAMP, res.type()); var exception = assertThrows( ExpressionEvaluationException.class, () -> date_add(LocalDateTime.of(1961, 4, 12, 9, 7), 100500)); assertEquals( - "date_add 
function expected {[DATE,INTERVAL],[DATETIME,INTERVAL]," - + "[TIMESTAMP,INTERVAL],[TIME,INTERVAL]}, but get [DATETIME,INTEGER]", + "date_add function expected {[DATE,INTERVAL],[TIMESTAMP,INTERVAL]," + + "[TIME,INTERVAL]}, but get [TIMESTAMP,INTEGER]", exception.getMessage()); } @@ -148,23 +170,29 @@ public void adddate_returns_date_when_args_are_date_and_days() { @Test public void adddate_returns_datetime_when_args_are_date_but_days() { var res = adddate(LocalDate.of(2000, 1, 1).atStartOfDay(), 2); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2000, 1, 3, 0, 0), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(2000, 1, 3, 0, 0), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); res = adddate(LocalTime.now(), 2); - assertEquals(DATETIME, res.type()); + assertEquals(TIMESTAMP, res.type()); assertEquals(today().plusDays(2), res.dateValue()); res = adddate(Instant.ofEpochSecond(42), 2); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1970, 1, 3, 0, 0, 42), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(1970, 1, 3, 0, 0, 42), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_accepts_negative_days() { var res = adddate(LocalDateTime.of(2020, 10, 20, 8, 16, 32), -40); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 10, 20, 8, 16, 32).minusDays(40), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(2020, 10, 20, 8, 16, 32).minusDays(40), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); assertEquals(subdate(LocalDateTime.of(2020, 10, 20, 8, 16, 32), 40), res); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java index a630758456..16d585d73e 100644 --- 
a/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java @@ -33,7 +33,7 @@ public class DateDiffTest extends DateTimeTestBase { private static final LocalDateTime dateTimeSample2 = LocalDateTime.of(1993, 3, 4, 5, 6); // Function signature is: - // (DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME) -> LONG + // (DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME) -> LONG private static Stream getTestData() { // Arguments are: first argument for `DATE_DIFF` function, second argument and expected result. return Stream.of( diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java index 460e12384b..123ecda0bd 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java @@ -8,7 +8,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import java.time.Duration; import java.time.Instant; @@ -18,10 +18,15 @@ import java.time.Period; import java.time.ZoneOffset; import org.junit.jupiter.api.Test; +import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.exception.ExpressionEvaluationException; public class DateSubAndSubDateTest extends DateTimeTestBase { + private LocalDateTime toLocalDateTime(ExprValue res) { + return res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(); + } + private LocalDate today() { return LocalDate.now(functionProperties.getQueryStartClock()); } @@ -29,81 +34,82 @@ private LocalDate today() { @Test public void 
subdate_returns_datetime_when_args_are_time_and_time_interval() { var res = subdate(LocalTime.of(21, 0), Duration.ofHours(1).plusMinutes(2)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(19, 58).atDate(today()), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalTime.of(19, 58).atDate(today()), toLocalDateTime(res)); } @Test public void date_sub_returns_datetime_when_args_are_time_and_time_interval() { var res = date_sub(LocalTime.of(10, 20, 30), Duration.ofHours(1).plusMinutes(2).plusSeconds(42)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(9, 17, 48).atDate(today()), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalTime.of(9, 17, 48).atDate(today()), toLocalDateTime(res)); } @Test public void subdate_time_limited_by_24_hours() { var res = subdate(LocalTime.MIN, Duration.ofNanos(1)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.MAX, res.datetimeValue().toLocalTime()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalTime.MAX, res.timestampValue().atZone(ZoneOffset.UTC).toLocalTime()); } @Test public void date_sub_time_limited_by_24_hours() { var res = date_sub(LocalTime.of(10, 20, 30), Duration.ofHours(20).plusMinutes(50).plusSeconds(7)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(13, 30, 23), res.datetimeValue().toLocalTime()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalTime.of(13, 30, 23), res.timestampValue().atZone(ZoneOffset.UTC).toLocalTime()); } @Test public void subdate_returns_datetime_when_args_are_date_and_date_interval() { var res = subdate(LocalDate.of(2020, 2, 20), Period.of(3, 11, 21)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDate.of(2016, 2, 28).atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDate.of(2016, 2, 28).atStartOfDay(), toLocalDateTime(res)); } @Test public void 
date_sub_returns_datetime_when_args_are_date_and_date_interval() { var res = date_sub(LocalDate.of(1961, 4, 12), Period.of(50, 50, 50)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDate.of(1906, 12, 24).atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDate.of(1906, 12, 24).atStartOfDay(), toLocalDateTime(res)); } @Test public void subdate_returns_datetime_when_args_are_date_and_time_interval() { var res = subdate(LocalDate.of(2020, 2, 20), Duration.ofHours(1).plusMinutes(2)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 2, 19, 22, 58), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(2020, 2, 19, 22, 58), toLocalDateTime(res)); } @Test public void date_sub_returns_datetime_when_args_are_date_and_time_interval() { var res = date_sub(LocalDate.of(1961, 4, 12), Duration.ofHours(9).plusMinutes(7)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 11, 14, 53), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(1961, 4, 11, 14, 53), toLocalDateTime(res)); } @Test public void subdate_returns_datetime_when_args_are_time_and_date_interval() { // Date based on today var res = subdate(LocalTime.of(1, 2, 0), Period.ofDays(1)); - assertEquals(DATETIME, res.type()); - assertEquals(today().minusDays(1).atTime(LocalTime.of(1, 2, 0)), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(today().minusDays(1).atTime(LocalTime.of(1, 2, 0)), toLocalDateTime(res)); } @Test public void date_sub_returns_datetime_when_args_are_time_and_date_interval() { var res = date_sub(LocalTime.MIDNIGHT, Period.ofDays(0)); - assertEquals(DATETIME, res.type()); - assertEquals(today().atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(today().atStartOfDay(), toLocalDateTime(res)); } @Test public void 
subdate_returns_datetime_when_first_arg_is_datetime() { var res = subdate(LocalDateTime.of(1961, 4, 12, 9, 7), Duration.ofMinutes(108)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 7, 19), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(1961, 4, 12, 7, 19), toLocalDateTime(res)); } @Test @@ -111,30 +117,30 @@ public void date_sub_returns_datetime_when_first_arg_is_timestamp() { var res = date_sub( LocalDateTime.of(1961, 4, 12, 9, 7).toInstant(ZoneOffset.UTC), Duration.ofMinutes(108)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 7, 19), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(1961, 4, 12, 7, 19), toLocalDateTime(res)); } @Test public void subdate_accepts_negative_interval() { var res = subdate(LocalDateTime.of(2020, 10, 20, 14, 42), Duration.ofDays(-10)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 10, 30, 14, 42), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(2020, 10, 30, 14, 42), toLocalDateTime(res)); assertEquals(adddate(LocalDateTime.of(2020, 10, 20, 14, 42), Duration.ofDays(10)), res); } @Test public void subdate_has_second_signature_but_not_date_sub() { var res = subdate(LocalDateTime.of(1961, 4, 12, 9, 7), 100500); - assertEquals(DATETIME, res.type()); + assertEquals(TIMESTAMP, res.type()); var exception = assertThrows( ExpressionEvaluationException.class, () -> date_sub(LocalDateTime.of(1961, 4, 12, 9, 7), 100500)); assertEquals( - "date_sub function expected {[DATE,INTERVAL],[DATETIME,INTERVAL]," - + "[TIMESTAMP,INTERVAL],[TIME,INTERVAL]}, but get [DATETIME,INTEGER]", + "date_sub function expected {[DATE,INTERVAL],[TIMESTAMP,INTERVAL],[TIME,INTERVAL]}, but get" + + " [TIMESTAMP,INTEGER]", exception.getMessage()); } @@ -148,23 +154,23 @@ public void subdate_returns_date_when_args_are_date_and_days() { 
@Test public void subdate_returns_datetime_when_args_are_date_but_days() { var res = subdate(LocalDate.of(2000, 1, 1).atStartOfDay(), 2); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1999, 12, 30, 0, 0), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(1999, 12, 30, 0, 0), toLocalDateTime(res)); res = subdate(LocalTime.now(), 2); - assertEquals(DATETIME, res.type()); + assertEquals(TIMESTAMP, res.type()); assertEquals(today().minusDays(2), res.dateValue()); res = subdate(Instant.ofEpochSecond(42), 2); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1969, 12, 30, 0, 0, 42), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(1969, 12, 30, 0, 0, 42), toLocalDateTime(res)); } @Test public void subdate_accepts_negative_days() { var res = subdate(LocalDateTime.of(2020, 10, 20, 8, 16, 32), -40); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 10, 20, 8, 16, 32).plusDays(40), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(2020, 10, 20, 8, 16, 32).plusDays(40), toLocalDateTime(res)); assertEquals(adddate(LocalDateTime.of(2020, 10, 20, 8, 16, 32), 40), res); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java index c2a6129626..d4ee7c44da 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java @@ -32,7 +32,6 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import 
org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprLongValue; @@ -398,10 +397,6 @@ private static Stream getTestDataForDayOfYear() { return Stream.of( Arguments.of( DSL.literal(new ExprDateValue("2020-08-07")), "day_of_year(DATE '2020-08-07')", 220), - Arguments.of( - DSL.literal(new ExprDatetimeValue("2020-08-07 12:23:34")), - "day_of_year(DATETIME '2020-08-07 12:23:34')", - 220), Arguments.of( DSL.literal(new ExprTimestampValue("2020-08-07 12:23:34")), "day_of_year(TIMESTAMP '2020-08-07 12:23:34')", @@ -519,11 +514,6 @@ private static Stream getTestDataForGetFormat() { Arguments.of("DATE", "ISO", "%Y-%m-%d"), Arguments.of("DATE", "EUR", "%d.%m.%Y"), Arguments.of("DATE", "INTERNAL", "%Y%m%d"), - Arguments.of("DATETIME", "USA", "%Y-%m-%d %H.%i.%s"), - Arguments.of("DATETIME", "JIS", "%Y-%m-%d %H:%i:%s"), - Arguments.of("DATETIME", "ISO", "%Y-%m-%d %H:%i:%s"), - Arguments.of("DATETIME", "EUR", "%Y-%m-%d %H.%i.%s"), - Arguments.of("DATETIME", "INTERNAL", "%Y%m%d%H%i%s"), Arguments.of("TIME", "USA", "%h:%i:%s %p"), Arguments.of("TIME", "JIS", "%H:%i:%s"), Arguments.of("TIME", "ISO", "%H:%i:%s"), @@ -572,11 +562,6 @@ public void hour() { assertEquals(integerValue(1), expression.valueOf()); assertEquals("hour(TIMESTAMP '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.hour(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); - assertEquals(INTEGER, expression.type()); - assertEquals(integerValue(1), expression.valueOf()); - assertEquals("hour(DATETIME '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.hour(DSL.literal("2020-08-17 01:02:03")); assertEquals(INTEGER, expression.type()); assertEquals(integerValue(1), expression.valueOf()); @@ -617,9 +602,7 @@ public void hourOfDay() { FunctionExpression expression2 = DSL.hour_of_day(DSL.literal("01:02:03")); FunctionExpression expression3 = DSL.hour_of_day(DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03"))); - FunctionExpression 
expression4 = - DSL.hour_of_day(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); - FunctionExpression expression5 = DSL.hour_of_day(DSL.literal("2020-08-17 01:02:03")); + FunctionExpression expression4 = DSL.hour_of_day(DSL.literal("2020-08-17 01:02:03")); assertAll( () -> hourOfDayQuery(expression1, 1), @@ -629,9 +612,7 @@ public void hourOfDay() { () -> hourOfDayQuery(expression3, 1), () -> assertEquals("hour_of_day(TIMESTAMP '2020-08-17 01:02:03')", expression3.toString()), () -> hourOfDayQuery(expression4, 1), - () -> assertEquals("hour_of_day(DATETIME '2020-08-17 01:02:03')", expression4.toString()), - () -> hourOfDayQuery(expression5, 1), - () -> assertEquals("hour_of_day(\"2020-08-17 01:02:03\")", expression5.toString())); + () -> assertEquals("hour_of_day(\"2020-08-17 01:02:03\")", expression4.toString())); } private void invalidHourOfDayQuery(String time) { @@ -731,15 +712,10 @@ public void microsecond() { assertEquals(integerValue(120000), eval(expression)); assertEquals("microsecond(\"01:02:03.12\")", expression.toString()); - expression = DSL.microsecond(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03.000010"))); - assertEquals(INTEGER, expression.type()); - assertEquals(integerValue(10), expression.valueOf()); - assertEquals("microsecond(DATETIME '2020-08-17 01:02:03.00001')", expression.toString()); - - expression = DSL.microsecond(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03.123456"))); + expression = DSL.microsecond(DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03.123456"))); assertEquals(INTEGER, expression.type()); assertEquals(integerValue(123456), expression.valueOf()); - assertEquals("microsecond(DATETIME '2020-08-17 01:02:03.123456')", expression.toString()); + assertEquals("microsecond(TIMESTAMP '2020-08-17 01:02:03.123456')", expression.toString()); expression = DSL.microsecond(DSL.literal("2020-08-17 01:02:03.123456")); assertEquals(INTEGER, expression.type()); @@ -769,11 +745,6 @@ public void minute() { 
assertEquals(integerValue(2), expression.valueOf()); assertEquals("minute(TIMESTAMP '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.minute(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); - assertEquals(INTEGER, expression.type()); - assertEquals(integerValue(2), expression.valueOf()); - assertEquals("minute(DATETIME '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.minute(DSL.literal("2020-08-17 01:02:03")); assertEquals(INTEGER, expression.type()); assertEquals(integerValue(2), expression.valueOf()); @@ -803,11 +774,6 @@ public void minuteOfDay() { assertEquals(integerValue(62), expression.valueOf()); assertEquals("minute_of_day(TIMESTAMP '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.minute_of_day(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); - assertEquals(INTEGER, expression.type()); - assertEquals(integerValue(62), expression.valueOf()); - assertEquals("minute_of_day(DATETIME '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.minute_of_day(DSL.literal("2020-08-17 01:02:03")); assertEquals(INTEGER, expression.type()); assertEquals(integerValue(62), expression.valueOf()); @@ -833,10 +799,6 @@ private static Stream getTestDataForMinuteOfHour() { DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03")), 2, "minute_of_hour(TIMESTAMP '2020-08-17 01:02:03')"), - Arguments.of( - DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03")), - 2, - "minute_of_hour(DATETIME '2020-08-17 01:02:03')"), Arguments.of( DSL.literal("2020-08-17 01:02:03"), 2, "minute_of_hour(\"2020-08-17 01:02:03\")")); } @@ -894,10 +856,6 @@ private static Stream getTestDataForMonthOfYear() { return Stream.of( Arguments.of( DSL.literal(new ExprDateValue("2020-08-07")), "month_of_year(DATE '2020-08-07')", 8), - Arguments.of( - DSL.literal(new ExprDatetimeValue("2020-08-07 12:23:34")), - "month_of_year(DATETIME '2020-08-07 12:23:34')", - 8), Arguments.of( DSL.literal(new 
ExprTimestampValue("2020-08-07 12:23:34")), "month_of_year(TIMESTAMP '2020-08-07 12:23:34')", @@ -1052,11 +1010,6 @@ public void second() { assertEquals(INTEGER, expression.type()); assertEquals(integerValue(3), expression.valueOf()); assertEquals("second(TIMESTAMP '2020-08-17 01:02:03')", expression.toString()); - - expression = DSL.second(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); - assertEquals(INTEGER, expression.type()); - assertEquals(integerValue(3), expression.valueOf()); - assertEquals("second(DATETIME '2020-08-17 01:02:03')", expression.toString()); } private void secondOfMinuteQuery(FunctionExpression dateExpression, int second, String testExpr) { @@ -1075,11 +1028,7 @@ private static Stream getTestDataForSecondOfMinute() { Arguments.of( DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03")), 3, - "second_of_minute(TIMESTAMP '2020-08-17 01:02:03')"), - Arguments.of( - DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03")), - 3, - "second_of_minute(DATETIME '2020-08-17 01:02:03')")); + "second_of_minute(TIMESTAMP '2020-08-17 01:02:03')")); } @ParameterizedTest(name = "{2}") @@ -1253,10 +1202,6 @@ private void validateStringFormat( private static Stream getTestDataForWeekFormats() { return Stream.of( Arguments.of(DSL.literal(new ExprDateValue("2019-01-05")), "DATE '2019-01-05'", 0), - Arguments.of( - DSL.literal(new ExprDatetimeValue("2019-01-05 01:02:03")), - "DATETIME '2019-01-05 01:02:03'", - 0), Arguments.of( DSL.literal(new ExprTimestampValue("2019-01-05 01:02:03")), "TIMESTAMP '2019-01-05 01:02:03'", diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java index d857122534..4bec093b57 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java @@ -7,7 +7,7 @@ import static 
org.junit.jupiter.api.Assertions.assertEquals; import static org.opensearch.sql.data.model.ExprValueUtils.nullValue; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import java.time.LocalDateTime; import java.time.ZoneId; @@ -15,7 +15,7 @@ import java.time.format.DateTimeFormatter; import java.util.TimeZone; import org.junit.jupiter.api.Test; -import org.opensearch.sql.data.model.ExprDatetimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.ExpressionTestBase; import org.opensearch.sql.expression.FunctionExpression; @@ -25,23 +25,23 @@ class DateTimeTest extends ExpressionTestBase { @Test public void noTimeZoneNoField2() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-15 22:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-15 22:00:00"), expr.valueOf()); } @Test public void positiveTimeZoneNoField2() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00+01:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-15 22:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-15 22:00:00"), expr.valueOf()); } @Test public void positiveField1WrittenField2() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00+01:00"), DSL.literal("America/Los_Angeles")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-15 14:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-15 14:00:00"), expr.valueOf()); } // When no timezone argument is passed inside the datetime field, it assumes local time. 
@@ -57,23 +57,23 @@ public void localDateTimeConversion() { .atZone(ZoneId.of(TimeZone.getDefault().getID())) .withZoneSameInstant(ZoneId.of(timeZone)); FunctionExpression expr = DSL.datetime(DSL.literal(dt), DSL.literal(timeZone)); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue(timeZoneLocal.toLocalDateTime()), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue(timeZoneLocal.toLocalDateTime()), expr.valueOf()); } @Test public void negativeField1WrittenField2() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-11:00"), DSL.literal("America/Los_Angeles")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-16 02:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-16 02:00:00"), expr.valueOf()); } @Test public void negativeField1PositiveField2() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-12:00"), DSL.literal("+15:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -81,7 +81,7 @@ public void negativeField1PositiveField2() { public void twentyFourHourDifference() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-14:00"), DSL.literal("+10:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -89,14 +89,14 @@ public void twentyFourHourDifference() { public void negativeToNull() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-11:00"), DSL.literal(nullValue())); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void invalidDate() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-04-31 22:00:00-11:00")); - assertEquals(DATETIME, expr.type()); + 
assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java index 023a3574aa..865c162f76 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java @@ -11,10 +11,10 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.temporal.Temporal; import java.util.List; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; @@ -91,7 +91,8 @@ protected Long datediff(Temporal first, Temporal second) { } protected LocalDateTime fromUnixTime(Double value) { - return fromUnixTime(DSL.literal(value)).valueOf().datetimeValue(); + return LocalDateTime.ofInstant( + fromUnixTime(DSL.literal(value)).valueOf().timestampValue(), ZoneOffset.UTC); } protected FunctionExpression fromUnixTime(Expression value) { @@ -109,7 +110,8 @@ protected FunctionExpression fromUnixTime(Expression value, Expression format) { } protected LocalDateTime fromUnixTime(Long value) { - return fromUnixTime(DSL.literal(value)).valueOf().datetimeValue(); + return LocalDateTime.ofInstant( + fromUnixTime(DSL.literal(value)).valueOf().timestampValue(), ZoneOffset.UTC); } protected String fromUnixTime(Long value, String format) { @@ -223,7 +225,7 @@ protected Double unixTimeStampOf(LocalDate value) { } protected Double unixTimeStampOf(LocalDateTime value) { - return unixTimeStampOf(DSL.literal(new ExprDatetimeValue(value))).valueOf().doubleValue(); + return unixTimeStampOf(DSL.literal(new 
ExprTimestampValue(value))).valueOf().doubleValue(); } protected Double unixTimeStampOf(Instant value) { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java index 820158b722..02d50d0b59 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java @@ -16,8 +16,8 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.Expression; @@ -72,12 +72,12 @@ private static Stream getDateResultsForExtractFunction() { }) public void testExtractWithDatetime(String part, long expected) { FunctionExpression datetimeExpression = - DSL.extract(DSL.literal(part), DSL.literal(new ExprDatetimeValue(datetimeInput))); + DSL.extract(DSL.literal(part), DSL.literal(new ExprTimestampValue(datetimeInput))); assertEquals(LONG, datetimeExpression.type()); assertEquals(expected, eval(datetimeExpression).longValue()); assertEquals( - String.format("extract(\"%s\", DATETIME '2023-02-11 10:11:12.123')", part), + String.format("extract(\"%s\", TIMESTAMP '2023-02-11 10:11:12.123')", part), datetimeExpression.toString()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java index 8fcc6904b2..a6d1da003f 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java +++ 
b/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java @@ -46,7 +46,9 @@ public void checkOfLong(Long value) { LocalDateTime.of(1970, 1, 1, 0, 0, 0).plus(value, ChronoUnit.SECONDS), fromUnixTime(value)); assertEquals( LocalDateTime.of(1970, 1, 1, 0, 0, 0).plus(value, ChronoUnit.SECONDS), - eval(fromUnixTime(DSL.literal(new ExprLongValue(value)))).datetimeValue()); + LocalDateTime.ofInstant( + eval(fromUnixTime(DSL.literal(new ExprLongValue(value)))).timestampValue(), + ZoneOffset.UTC)); } private static Stream getDoubleSamples() { @@ -76,7 +78,9 @@ public void checkOfDouble(Double value) { valueAsString); assertEquals( LocalDateTime.ofEpochSecond(intPart, (int) Math.round(fracPart * 1E9), ZoneOffset.UTC), - eval(fromUnixTime(DSL.literal(new ExprDoubleValue(value)))).datetimeValue(), + LocalDateTime.ofInstant( + eval(fromUnixTime(DSL.literal(new ExprDoubleValue(value)))).timestampValue(), + ZoneOffset.UTC), valueAsString); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java index 0e5c00084f..8b795786c0 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java @@ -9,13 +9,13 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.TIME; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import 
java.time.temporal.ChronoUnit; import java.time.temporal.Temporal; @@ -45,7 +45,7 @@ class NowLikeFunctionTest extends ExpressionTestBase { void now() { test_now_like_functions( DSL::now, - DATETIME, + TIMESTAMP, false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @@ -54,7 +54,7 @@ void now() { void current_timestamp() { test_now_like_functions( DSL::current_timestamp, - DATETIME, + TIMESTAMP, false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @@ -63,7 +63,7 @@ void current_timestamp() { void localtimestamp() { test_now_like_functions( DSL::localtimestamp, - DATETIME, + TIMESTAMP, false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @@ -72,14 +72,14 @@ void localtimestamp() { void localtime() { test_now_like_functions( DSL::localtime, - DATETIME, + TIMESTAMP, false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @Test void sysdate() { - test_now_like_functions(DSL::sysdate, DATETIME, true, LocalDateTime::now); + test_now_like_functions(DSL::sysdate, TIMESTAMP, true, LocalDateTime::now); } @Test @@ -128,14 +128,14 @@ void utc_time() { @Test void utc_timestamp() { test_now_like_functions( - DSL::utc_timestamp, DATETIME, false, () -> utcDateTimeNow(functionProperties)); + DSL::utc_timestamp, TIMESTAMP, false, () -> utcDateTimeNow(functionProperties)); } private static LocalDateTime utcDateTimeNow(FunctionProperties functionProperties) { ZonedDateTime zonedDateTime = LocalDateTime.now(functionProperties.getQueryStartClock()) .atZone(TimeZone.getDefault().toZoneId()); - return zonedDateTime.withZoneSameInstant(UTC_ZONE_ID).toLocalDateTime(); + return zonedDateTime.withZoneSameInstant(ZoneOffset.UTC).toLocalDateTime(); } /** @@ -249,8 +249,8 @@ private Temporal extractValue(FunctionExpression func) { switch ((ExprCoreType) func.type()) { case DATE: return func.valueOf().dateValue(); - case DATETIME: - return func.valueOf().datetimeValue(); + case TIMESTAMP: + return 
LocalDateTime.ofInstant(func.valueOf().timestampValue(), ZoneOffset.UTC); case TIME: return func.valueOf().timeValue(); // unreachable code diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java index 42d4aab1f6..7f0861d9c3 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java @@ -6,21 +6,23 @@ package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import static org.opensearch.sql.data.type.ExprCoreType.UNDEFINED; +import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.util.stream.Stream; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.expression.DSL; @@ -34,23 +36,23 @@ private static Stream getTestDataForStrToDate() { return Stream.of( // Date arguments Arguments.of( - "01,5,2013", "%d,%m,%Y", new ExprDatetimeValue("2013-05-01 00:00:00"), DATETIME), + "01,5,2013", "%d,%m,%Y", new ExprTimestampValue("2013-05-01 00:00:00"), TIMESTAMP), Arguments.of( - "May 1, 2013", "%M %d, %Y", new ExprDatetimeValue("2013-05-01 
00:00:00"), DATETIME), + "May 1, 2013", "%M %d, %Y", new ExprTimestampValue("2013-05-01 00:00:00"), TIMESTAMP), Arguments.of( "May 1, 2013 - 9,23,11", "%M %d, %Y - %h,%i,%s", - new ExprDatetimeValue("2013-05-01 09:23:11"), - DATETIME), + new ExprTimestampValue("2013-05-01 09:23:11"), + TIMESTAMP), Arguments.of( - "2000,1,1", "%Y,%m,%d", new ExprDatetimeValue("2000-01-01 00:00:00"), DATETIME), + "2000,1,1", "%Y,%m,%d", new ExprTimestampValue("2000-01-01 00:00:00"), TIMESTAMP), Arguments.of( - "2000,1,1,10", "%Y,%m,%d,%h", new ExprDatetimeValue("2000-01-01 10:00:00"), DATETIME), + "2000,1,1,10", "%Y,%m,%d,%h", new ExprTimestampValue("2000-01-01 10:00:00"), TIMESTAMP), Arguments.of( "2000,1,1,10,11", "%Y,%m,%d,%h,%i", - new ExprDatetimeValue("2000-01-01 10:11:00"), - DATETIME), + new ExprTimestampValue("2000-01-01 10:11:00"), + TIMESTAMP), // Invalid Arguments (should return null) Arguments.of("a09:30:17", "a%h:%i:%s", ExprNullValue.of(), UNDEFINED), @@ -108,20 +110,22 @@ public void test_str_to_date_with_time_type(String parsed, String format) { ExprValue result = eval(expression); - assertEquals(DATETIME, result.type()); - assertEquals(getExpectedTimeResult(9, 23, 11), result.datetimeValue()); + assertEquals(TIMESTAMP, result.type()); + assertEquals( + getExpectedTimeResult(9, 23, 11), + LocalDateTime.ofInstant(result.timestampValue(), ZoneOffset.UTC)); } @Test public void test_str_to_date_with_date_format() { - LocalDateTime arg = LocalDateTime.of(2023, 2, 27, 10, 11, 12); + Instant arg = Instant.parse("2023-02-27T10:11:12Z"); String format = "%Y,%m,%d %h,%i,%s"; FunctionExpression dateFormatExpr = DSL.date_format( functionProperties, - DSL.literal(new ExprDatetimeValue(arg)), + DSL.literal(new ExprTimestampValue(arg)), DSL.literal(new ExprStringValue(format))); String dateFormatResult = eval(dateFormatExpr).stringValue(); @@ -130,7 +134,7 @@ public void test_str_to_date_with_date_format() { functionProperties, DSL.literal(new ExprStringValue(dateFormatResult)), 
DSL.literal(new ExprStringValue(format))); - LocalDateTime strToDateResult = eval(strToDateExpr).datetimeValue(); + Instant strToDateResult = eval(strToDateExpr).timestampValue(); assertEquals(arg, strToDateResult); } @@ -156,7 +160,8 @@ public void test_str_to_date_with_time_format() { functionProperties, DSL.literal(new ExprStringValue(timeFormatResult)), DSL.literal(new ExprStringValue(format))); - LocalDateTime strToDateResult = eval(strToDateExpr).datetimeValue(); + LocalDateTime strToDateResult = + LocalDateTime.ofInstant(eval(strToDateExpr).timestampValue(), ZoneOffset.UTC); assertEquals(getExpectedTimeResult(HOURS, MINUTES, SECONDS), strToDateResult); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java index 243eb6bb7b..13f4f20704 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java @@ -18,7 +18,6 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; @@ -43,12 +42,6 @@ private static Stream getTestDataForTimestampAdd() { Arguments.of("MINUTE", 1, new ExprDateValue("2003-01-02"), "2003-01-02 00:01:00"), Arguments.of("WEEK", 1, new ExprDateValue("2003-01-02"), "2003-01-09 00:00:00"), - // Datetime - Arguments.of( - "MINUTE", 1, new ExprDatetimeValue("2003-01-02 00:00:00"), "2003-01-02 00:01:00"), - Arguments.of( - "WEEK", 1, new ExprDatetimeValue("2003-01-02 00:00:00"), "2003-01-09 00:00:00"), - // Timestamp Arguments.of( "MINUTE", 1, new ExprTimestampValue("2003-01-02 00:00:00"), "2003-01-02 
00:01:00"), @@ -125,7 +118,7 @@ private static FunctionExpression timestampaddQuery( @MethodSource("getTestDataForTimestampAdd") public void testTimestampadd(String unit, int amount, ExprValue datetimeExpr, String expected) { FunctionExpression expr = timestampaddQuery(unit, amount, datetimeExpr); - assertEquals(new ExprDatetimeValue(expected), eval(expr)); + assertEquals(new ExprTimestampValue(expected), eval(expr)); } private static Stream getTestDataForTestAddingDatePartToTime() { @@ -165,7 +158,7 @@ public void testAddingDatePartToTime( LocalDateTime expected1 = LocalDateTime.of(expectedDate, LocalTime.parse(timeArg)); - assertEquals(new ExprDatetimeValue(expected1), eval(expr)); + assertEquals(new ExprTimestampValue(expected1), eval(expr)); } @Test @@ -184,7 +177,7 @@ public void testAddingTimePartToTime() { LocalDateTime expected = LocalDateTime.of(LocalDate.now(), LocalTime.parse(timeArg).plusMinutes(addedInterval)); - assertEquals(new ExprDatetimeValue(expected), eval(expr)); + assertEquals(new ExprTimestampValue(expected), eval(expr)); } @Test @@ -196,15 +189,11 @@ public void testDifferentInputTypesHaveSameResult() { FunctionExpression stringExpr = timestampaddQuery(part, amount, new ExprStringValue("2000-01-01 00:00:00")); - FunctionExpression datetimeExpr = - timestampaddQuery(part, amount, new ExprDatetimeValue("2000-01-01 00:00:00")); - FunctionExpression timestampExpr = timestampaddQuery(part, amount, new ExprTimestampValue("2000-01-01 00:00:00")); assertAll( () -> assertEquals(eval(dateExpr), eval(stringExpr)), - () -> assertEquals(eval(dateExpr), eval(datetimeExpr)), () -> assertEquals(eval(dateExpr), eval(timestampExpr))); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java index 061420ceee..b5ac3b078f 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java +++ 
b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java @@ -21,7 +21,6 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.model.ExprTimeValue; @@ -80,8 +79,6 @@ private static ExprValue generateArg( return new ExprTimestampValue(arg.toInstant(ZoneOffset.UTC)); case "DATE": return new ExprDateValue(arg.toLocalDate()); - case "DATETIME": - return new ExprDatetimeValue(arg); case "STRING": return new ExprStringValue( String.format( @@ -118,7 +115,7 @@ private static Stream getGeneralTestDataForTimestampDiff() { final String[] intervalTypes = ArrayUtils.addAll(timeIntervalTypes, dateIntervalTypes); // TIME type not included here as it is a special case handled by a different test - final String[] expressionTypes = {"DATE", "DATETIME", "TIMESTAMP", "STRING"}; + final String[] expressionTypes = {"DATE", "TIMESTAMP", "STRING"}; final LocalDateTime baseDateTime = LocalDateTime.of(2000, 1, 1, 0, 0, 0); final int intervalDifference = 5; @@ -159,30 +156,30 @@ private static Stream getCornerCaseTestDataForTimestampDiff() { // Test around Leap Year Arguments.of( "DAY", - new ExprDatetimeValue("2019-02-28 00:00:00"), - new ExprDatetimeValue("2019-03-01 00:00:00"), + new ExprTimestampValue("2019-02-28 00:00:00"), + new ExprTimestampValue("2019-03-01 00:00:00"), 1), Arguments.of( "DAY", - new ExprDatetimeValue("2020-02-28 00:00:00"), - new ExprDatetimeValue("2020-03-01 00:00:00"), + new ExprTimestampValue("2020-02-28 00:00:00"), + new ExprTimestampValue("2020-03-01 00:00:00"), 2), // Test around year change Arguments.of( "SECOND", - new ExprDatetimeValue("2019-12-31 23:59:59"), - new ExprDatetimeValue("2020-01-01 00:00:00"), + new 
ExprTimestampValue("2019-12-31 23:59:59"), + new ExprTimestampValue("2020-01-01 00:00:00"), 1), Arguments.of( "DAY", - new ExprDatetimeValue("2019-12-31 23:59:59"), - new ExprDatetimeValue("2020-01-01 00:00:00"), + new ExprTimestampValue("2019-12-31 23:59:59"), + new ExprTimestampValue("2020-01-01 00:00:00"), 0), Arguments.of( "DAY", - new ExprDatetimeValue("2019-12-31 00:00:00"), - new ExprDatetimeValue("2020-01-01 00:00:00"), + new ExprTimestampValue("2019-12-31 00:00:00"), + new ExprTimestampValue("2020-01-01 00:00:00"), 1)); } @@ -295,13 +292,6 @@ public void testDifferentInputTypesHaveSameResult() { new ExprStringValue("2000-01-01 00:00:00"), new ExprStringValue("2000-01-02 00:00:00")); - FunctionExpression datetimeExpr = - timestampdiffQuery( - functionProperties, - part, - new ExprDatetimeValue("2000-01-01 00:00:00"), - new ExprDatetimeValue("2000-01-02 00:00:00")); - FunctionExpression timestampExpr = timestampdiffQuery( functionProperties, @@ -311,7 +301,6 @@ public void testDifferentInputTypesHaveSameResult() { assertAll( () -> assertEquals(eval(dateExpr), eval(stringExpr)), - () -> assertEquals(eval(dateExpr), eval(datetimeExpr)), () -> assertEquals(eval(dateExpr), eval(timestampExpr))); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java index 7d25c0041b..5aebec9e78 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java @@ -8,12 +8,12 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import 
java.time.ZoneOffset; import java.util.stream.Stream; import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; @@ -39,7 +39,9 @@ public void timestamp_one_arg_string() { expr = DSL.timestamp(functionProperties, DSL.literal("1961-04-12 09:07:00.123456")); assertEquals(TIMESTAMP, expr.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 9, 7, 0, 123456000), expr.valueOf().datetimeValue()); + assertEquals( + LocalDateTime.of(1961, 4, 12, 9, 7, 0, 123456000), + expr.valueOf().timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } /** @@ -71,7 +73,8 @@ public void timestamp_one_arg_string_invalid_format(String value, String testNam public void timestamp_one_arg_time() { var expr = DSL.timestamp(functionProperties, DSL.time(DSL.literal("22:33:44"))); assertEquals(TIMESTAMP, expr.type()); - var refValue = LocalDate.now().atTime(LocalTime.of(22, 33, 44)).atZone(UTC_ZONE_ID).toInstant(); + var refValue = + LocalDate.now().atTime(LocalTime.of(22, 33, 44)).atZone(ZoneOffset.UTC).toInstant(); assertEquals(new ExprTimestampValue(refValue), expr.valueOf()); } @@ -79,17 +82,10 @@ public void timestamp_one_arg_time() { public void timestamp_one_arg_date() { var expr = DSL.timestamp(functionProperties, DSL.date(DSL.literal("2077-12-15"))); assertEquals(TIMESTAMP, expr.type()); - var refValue = LocalDate.of(2077, 12, 15).atStartOfDay().atZone(UTC_ZONE_ID).toInstant(); + var refValue = LocalDate.of(2077, 12, 15).atStartOfDay().atZone(ZoneOffset.UTC).toInstant(); assertEquals(new ExprTimestampValue(refValue), expr.valueOf()); } - @Test - public void timestamp_one_arg_datetime() { - var expr = DSL.timestamp(functionProperties, DSL.datetime(DSL.literal("1961-04-12 09:07:00"))); - assertEquals(TIMESTAMP, expr.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 9, 7, 0), expr.valueOf().datetimeValue()); - } - @Test public void timestamp_one_arg_timestamp() { var refValue = new ExprTimestampValue(Instant.ofEpochSecond(10050042)); @@ 
-100,7 +96,7 @@ public void timestamp_one_arg_timestamp() { } private static Instant dateTime2Instant(LocalDateTime dt) { - return dt.atZone(UTC_ZONE_ID).toInstant(); + return dt.atZone(ZoneOffset.UTC).toInstant(); } private static ExprTimestampValue dateTime2ExprTs(LocalDateTime dt) { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java index 7aa824e61d..910fe42a52 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java @@ -20,7 +20,6 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprIntervalValue; import org.opensearch.sql.data.model.ExprLongValue; import org.opensearch.sql.data.model.ExprNullValue; @@ -52,7 +51,6 @@ private static Stream getTestDataForToSeconds() { Arguments.of(new ExprStringValue("2009-11-29 00:00:00"), new ExprLongValue(63426672000L)), Arguments.of(new ExprStringValue("2009-11-29 13:43:32"), new ExprLongValue(63426721412L)), Arguments.of(new ExprDateValue("2009-11-29"), new ExprLongValue(63426672000L)), - Arguments.of(new ExprDatetimeValue("2009-11-29 13:43:32"), new ExprLongValue(63426721412L)), Arguments.of( new ExprTimestampValue("2009-11-29 13:43:32"), new ExprLongValue(63426721412L))); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java index c979b68302..7373b126c5 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java @@ -20,7 +20,6 @@ import 
org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprTimestampValue; @@ -81,7 +80,7 @@ public void checkOfDateTime(LocalDateTime value) { assertEquals(value.toEpochSecond(ZoneOffset.UTC), unixTimeStampOf(value)); assertEquals( value.toEpochSecond(ZoneOffset.UTC), - eval(unixTimeStampOf(DSL.literal(new ExprDatetimeValue(value)))).longValue()); + eval(unixTimeStampOf(DSL.literal(new ExprTimestampValue(value)))).longValue()); } private static Stream getInstantSamples() { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java index c74b062fba..75aed94e03 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java @@ -6,17 +6,17 @@ package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.LocalDateTime; +import java.time.ZoneOffset; import java.util.stream.Stream; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprLongValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.expression.DSL; public class UnixTwoWayConversionTest extends DateTimeTestBase { 
@@ -28,11 +28,14 @@ public void checkConvertNow() { @Test public void checkConvertNow_with_eval() { - assertEquals(getExpectedNow(), eval(fromUnixTime(unixTimeStampExpr())).datetimeValue()); + assertEquals( + getExpectedNow(), + LocalDateTime.ofInstant( + eval(fromUnixTime(unixTimeStampExpr())).timestampValue(), ZoneOffset.UTC)); } private LocalDateTime getExpectedNow() { - return LocalDateTime.now(functionProperties.getQueryStartClock().withZone(UTC_ZONE_ID)) + return LocalDateTime.now(functionProperties.getQueryStartClock().withZone(ZoneOffset.UTC)) .withNano(0); } @@ -86,7 +89,9 @@ public void convertDateTime2Epoch2DateTime(LocalDateTime value) { assertEquals(value, fromUnixTime(unixTimeStampOf(value))); assertEquals( value, - eval(fromUnixTime(unixTimeStampOf(DSL.literal(new ExprDatetimeValue(value))))) - .datetimeValue()); + LocalDateTime.ofInstant( + eval(fromUnixTime(unixTimeStampOf(DSL.literal(new ExprTimestampValue(value))))) + .timestampValue(), + ZoneOffset.UTC)); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java index 4f7208d141..3533886f9c 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java @@ -19,8 +19,8 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.exception.SemanticCheckException; import org.opensearch.sql.expression.DSL; @@ -137,7 +137,7 @@ public void yearweekModeInUnsupportedFormat() { FunctionExpression expression1 = DSL.yearweek( functionProperties, - 
DSL.literal(new ExprDatetimeValue("2019-01-05 10:11:12")), + DSL.literal(new ExprTimestampValue("2019-01-05 10:11:12")), DSL.literal(8)); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> eval(expression1)); @@ -146,7 +146,7 @@ public void yearweekModeInUnsupportedFormat() { FunctionExpression expression2 = DSL.yearweek( functionProperties, - DSL.literal(new ExprDatetimeValue("2019-01-05 10:11:12")), + DSL.literal(new ExprTimestampValue("2019-01-05 10:11:12")), DSL.literal(-1)); exception = assertThrows(SemanticCheckException.class, () -> eval(expression2)); assertEquals("mode:-1 is invalid, please use mode value between 0-7", exception.getMessage()); diff --git a/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java b/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java index 3ee12f59d4..237477050d 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java @@ -16,10 +16,10 @@ import static org.mockito.Mockito.when; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRING; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import static org.opensearch.sql.data.type.ExprCoreType.UNDEFINED; import static org.opensearch.sql.expression.function.BuiltinFunctionName.CAST_TO_BOOLEAN; @@ -126,7 +126,7 @@ void resolve_should_not_cast_arguments_in_cast_function() { FunctionImplementation function = repo.resolve( Collections.emptyList(), - 
registerFunctionResolver(CAST_TO_BOOLEAN.getName(), DATETIME, BOOLEAN)) + registerFunctionResolver(CAST_TO_BOOLEAN.getName(), TIMESTAMP, BOOLEAN)) .apply(functionProperties, ImmutableList.of(mockExpression)); assertEquals("cast_to_boolean(string)", function.toString()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java b/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java index 3b6e5f7586..9de1e65108 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java @@ -10,7 +10,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -58,12 +57,8 @@ class WideningTypeRuleTest { .put(STRING, TIMESTAMP, 1) .put(STRING, DATE, 1) .put(STRING, TIME, 1) - .put(STRING, DATETIME, 1) - .put(DATE, DATETIME, 1) - .put(TIME, DATETIME, 1) - .put(DATE, TIMESTAMP, 2) - .put(TIME, TIMESTAMP, 2) - .put(DATETIME, TIMESTAMP, 1) + .put(DATE, TIMESTAMP, 1) + .put(TIME, TIMESTAMP, 1) .put(UNDEFINED, BYTE, 1) .put(UNDEFINED, SHORT, 2) .put(UNDEFINED, INTEGER, 3) diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java index 7803a4dbca..44a3ccabbd 100644 --- a/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java @@ -10,7 
+10,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -27,7 +26,6 @@ import org.opensearch.sql.data.model.ExprBooleanValue; import org.opensearch.sql.data.model.ExprByteValue; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprFloatValue; import org.opensearch.sql.data.model.ExprIntegerValue; @@ -72,12 +70,8 @@ private static Stream timestamp() { return Stream.of(new ExprTimestampValue("2020-12-24 01:01:01")); } - private static Stream datetime() { - return Stream.of(new ExprDatetimeValue("2020-12-24 01:01:01")); - } - @ParameterizedTest(name = "castString({0})") - @MethodSource({"numberData", "stringData", "boolData", "date", "time", "timestamp", "datetime"}) + @MethodSource({"numberData", "stringData", "boolData", "date", "time", "timestamp"}) void castToString(ExprValue value) { FunctionExpression expression = DSL.castString(DSL.literal(value)); assertEquals(STRING, expression.type()); @@ -299,7 +293,7 @@ void castToDate() { assertEquals(DATE, expression.type()); assertEquals(new ExprDateValue("2012-08-07"), expression.valueOf()); - expression = DSL.castDate(DSL.literal(new ExprDatetimeValue("2012-08-07 01:01:01"))); + expression = DSL.castDate(DSL.literal(new ExprTimestampValue("2012-08-07 01:01:01"))); assertEquals(DATE, expression.type()); assertEquals(new ExprDateValue("2012-08-07"), expression.valueOf()); @@ -318,7 +312,7 @@ void castToTime() { assertEquals(TIME, expression.type()); assertEquals(new 
ExprTimeValue("01:01:01"), expression.valueOf()); - expression = DSL.castTime(DSL.literal(new ExprDatetimeValue("2012-08-07 01:01:01"))); + expression = DSL.castTime(DSL.literal(new ExprTimestampValue("2012-08-07 01:01:01"))); assertEquals(TIME, expression.type()); assertEquals(new ExprTimeValue("01:01:01"), expression.valueOf()); @@ -337,7 +331,7 @@ void castToTimestamp() { assertEquals(TIMESTAMP, expression.type()); assertEquals(new ExprTimestampValue("2012-08-07 01:01:01"), expression.valueOf()); - expression = DSL.castTimestamp(DSL.literal(new ExprDatetimeValue("2012-08-07 01:01:01"))); + expression = DSL.castTimestamp(DSL.literal(new ExprTimestampValue("2012-08-07 01:01:01"))); assertEquals(TIMESTAMP, expression.type()); assertEquals(new ExprTimestampValue("2012-08-07 01:01:01"), expression.valueOf()); @@ -345,19 +339,4 @@ void castToTimestamp() { assertEquals(TIMESTAMP, expression.type()); assertEquals(new ExprTimestampValue("2012-08-07 01:01:01"), expression.valueOf()); } - - @Test - void castToDatetime() { - FunctionExpression expression = DSL.castDatetime(DSL.literal("2012-08-07 01:01:01")); - assertEquals(DATETIME, expression.type()); - assertEquals(new ExprDatetimeValue("2012-08-07 01:01:01"), expression.valueOf()); - - expression = DSL.castDatetime(DSL.literal(new ExprTimestampValue("2012-08-07 01:01:01"))); - assertEquals(DATETIME, expression.type()); - assertEquals(new ExprDatetimeValue("2012-08-07 01:01:01"), expression.valueOf()); - - expression = DSL.castDatetime(DSL.literal(new ExprDateValue("2012-08-07"))); - assertEquals(DATETIME, expression.type()); - assertEquals(new ExprDatetimeValue("2012-08-07 00:00:00"), expression.valueOf()); - } } diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java index e6290553ce..55dfbd35c2 100644 --- 
a/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java @@ -18,7 +18,6 @@ import static org.opensearch.sql.data.model.ExprValueUtils.fromObjectValue; import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; @@ -472,11 +471,10 @@ private void assertStringRepr( if (v1.type() == v2.type()) { assertEquals(String.format("%s(%s, %s)", function, v1, v2), functionExpression.toString()); } else { - var widerType = v1.type() == TIMESTAMP || v2.type() == TIMESTAMP ? TIMESTAMP : DATETIME; assertEquals( String.format( "%s(%s, %s)", - function, getExpectedStringRepr(widerType, v1), getExpectedStringRepr(widerType, v2)), + function, getExpectedStringRepr(TIMESTAMP, v1), getExpectedStringRepr(TIMESTAMP, v2)), functionExpression.toString()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java b/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java index ac4153f59f..4b15704c77 100644 --- a/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java @@ -11,7 +11,6 @@ import java.time.Duration; import java.time.Instant; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; import java.util.LinkedHashMap; import java.util.List; @@ -21,7 +20,6 @@ import org.opensearch.sql.data.model.ExprByteValue; import org.opensearch.sql.data.model.ExprCollectionValue; import 
org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprFloatValue; import org.opensearch.sql.data.model.ExprIntegerValue; @@ -49,7 +47,6 @@ void typeof() { assertEquals("BOOLEAN", typeofGetValue(ExprBooleanValue.of(false))); assertEquals("BYTE", typeofGetValue(new ExprByteValue(0))); assertEquals("DATE", typeofGetValue(new ExprDateValue(LocalDate.now()))); - assertEquals("DATETIME", typeofGetValue(new ExprDatetimeValue(LocalDateTime.now()))); assertEquals("DOUBLE", typeofGetValue(new ExprDoubleValue(0))); assertEquals("FLOAT", typeofGetValue(new ExprFloatValue(0))); assertEquals("INTEGER", typeofGetValue(new ExprIntegerValue(0))); diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java index 0f3f4bd61f..ee784045d0 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java @@ -10,7 +10,6 @@ import static org.hamcrest.Matchers.containsInRelativeOrder; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -25,7 +24,6 @@ import java.util.List; import org.junit.jupiter.api.Test; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; @@ -153,21 
+151,21 @@ public void minute_span() { PhysicalPlan plan = new AggregationOperator( testScan(datetimeInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("minute", DATETIME)))), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("minute", TIMESTAMP)))), Collections.singletonList( - DSL.named("span", DSL.span(DSL.ref("minute", DATETIME), DSL.literal(5), "m")))); + DSL.named("span", DSL.span(DSL.ref("minute", TIMESTAMP), DSL.literal(5), "m")))); List result = execute(plan); assertEquals(3, result.size()); assertThat( result, containsInRelativeOrder( ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2020-12-31 23:50:00"), "count", 1)), + ImmutableMap.of("span", new ExprTimestampValue("2020-12-31 23:50:00"), "count", 1)), ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 3)), + ImmutableMap.of("span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 3)), ExprValueUtils.tupleValue( ImmutableMap.of( - "span", new ExprDatetimeValue("2021-01-01 00:05:00"), "count", 1)))); + "span", new ExprTimestampValue("2021-01-01 00:05:00"), "count", 1)))); plan = new AggregationOperator( @@ -296,23 +294,23 @@ public void month_span() { plan = new AggregationOperator( testScan(dateInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("quarter", DATETIME)))), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("quarter", TIMESTAMP)))), Collections.singletonList( - DSL.named("span", DSL.span(DSL.ref("quarter", DATETIME), DSL.literal(2), "M")))); + DSL.named("span", DSL.span(DSL.ref("quarter", TIMESTAMP), DSL.literal(2), "M")))); result = execute(plan); assertEquals(4, result.size()); assertThat( result, containsInRelativeOrder( ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2020-09-01 00:00:00"), "count", 1)), + ImmutableMap.of("span", new ExprTimestampValue("2020-09-01 00:00:00"), "count", 1)), 
ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2020-11-01 00:00:00"), "count", 1)), + ImmutableMap.of("span", new ExprTimestampValue("2020-11-01 00:00:00"), "count", 1)), ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 1)), + ImmutableMap.of("span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 1)), ExprValueUtils.tupleValue( ImmutableMap.of( - "span", new ExprDatetimeValue("2021-05-01 00:00:00"), "count", 2)))); + "span", new ExprTimestampValue("2021-05-01 00:00:00"), "count", 2)))); plan = new AggregationOperator( @@ -340,19 +338,19 @@ public void quarter_span() { PhysicalPlan plan = new AggregationOperator( testScan(dateInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("quarter", DATETIME)))), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("quarter", TIMESTAMP)))), Collections.singletonList( - DSL.named("span", DSL.span(DSL.ref("quarter", DATETIME), DSL.literal(2), "q")))); + DSL.named("span", DSL.span(DSL.ref("quarter", TIMESTAMP), DSL.literal(2), "q")))); List result = execute(plan); assertEquals(2, result.size()); assertThat( result, containsInRelativeOrder( ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2020-07-01 00:00:00"), "count", 2)), + ImmutableMap.of("span", new ExprTimestampValue("2020-07-01 00:00:00"), "count", 2)), ExprValueUtils.tupleValue( ImmutableMap.of( - "span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 3)))); + "span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 3)))); plan = new AggregationOperator( diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java index 003e59959f..6399f945ed 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java +++ 
b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java @@ -14,7 +14,6 @@ import java.util.List; import java.util.Map; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; @@ -109,35 +108,35 @@ public class PhysicalPlanTestBase { ImmutableMap.of( "day", new ExprDateValue("2021-01-03"), "month", new ExprDateValue("2021-02-04"), - "quarter", new ExprDatetimeValue("2021-01-01 12:25:02"), + "quarter", new ExprTimestampValue("2021-01-01 12:25:02"), "year", new ExprTimestampValue("2013-01-01 12:25:02")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "day", new ExprDateValue("2021-01-01"), "month", new ExprDateValue("2021-03-17"), - "quarter", new ExprDatetimeValue("2021-05-17 12:25:01"), + "quarter", new ExprTimestampValue("2021-05-17 12:25:01"), "year", new ExprTimestampValue("2021-01-01 12:25:02")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "day", new ExprDateValue("2021-01-04"), "month", new ExprDateValue("2021-02-08"), - "quarter", new ExprDatetimeValue("2021-06-08 12:25:02"), + "quarter", new ExprTimestampValue("2021-06-08 12:25:02"), "year", new ExprTimestampValue("2016-01-01 12:25:02")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "day", new ExprDateValue("2021-01-02"), "month", new ExprDateValue("2020-12-12"), - "quarter", new ExprDatetimeValue("2020-12-12 12:25:03"), + "quarter", new ExprTimestampValue("2020-12-12 12:25:03"), "year", new ExprTimestampValue("1999-01-01 12:25:02")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "day", new ExprDateValue("2021-01-01"), "month", new ExprDateValue("2021-02-28"), - "quarter", new ExprDatetimeValue("2020-09-28 12:25:01"), + "quarter", new ExprTimestampValue("2020-09-28 12:25:01"), "year", new ExprTimestampValue("2018-01-01 12:25:02")))) .build(); @@ -147,31 
+146,31 @@ public class PhysicalPlanTestBase { ExprValueUtils.tupleValue( ImmutableMap.of( "hour", new ExprTimeValue("17:17:00"), - "minute", new ExprDatetimeValue("2020-12-31 23:54:12"), + "minute", new ExprTimestampValue("2020-12-31 23:54:12"), "second", new ExprTimestampValue("2021-01-01 00:00:05")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "hour", new ExprTimeValue("18:17:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:05:12"), + "minute", new ExprTimestampValue("2021-01-01 00:05:12"), "second", new ExprTimestampValue("2021-01-01 00:00:12")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "hour", new ExprTimeValue("17:15:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:03:12"), + "minute", new ExprTimestampValue("2021-01-01 00:03:12"), "second", new ExprTimestampValue("2021-01-01 00:00:17")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "hour", new ExprTimeValue("19:01:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:02:12"), + "minute", new ExprTimestampValue("2021-01-01 00:02:12"), "second", new ExprTimestampValue("2021-01-01 00:00:03")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "hour", new ExprTimeValue("18:50:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:00:12"), + "minute", new ExprTimestampValue("2021-01-01 00:00:12"), "second", new ExprTimestampValue("2021-01-01 00:00:13")))) .build(); diff --git a/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java b/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java index b25f4d1053..0d9fe80339 100644 --- a/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java +++ b/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java @@ -10,7 +10,7 @@ import static org.opensearch.sql.data.model.ExprValueUtils.getIntegerValue; import static org.opensearch.sql.data.model.ExprValueUtils.getLongValue; import static org.opensearch.sql.data.model.ExprValueUtils.getStringValue; -import static 
org.opensearch.sql.utils.DateTimeUtils.extractDateTime; +import static org.opensearch.sql.utils.DateTimeUtils.extractTimestamp; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.type.ExprCoreType; @@ -29,8 +29,8 @@ public static int compare(FunctionProperties functionProperties, ExprValue v1, E } else if (v1.isNull() || v2.isNull()) { throw new ExpressionEvaluationException("invalid to call compare operation on null value"); } else if (v1.type() != v2.type() && v1.isDateTime() && v2.isDateTime()) { - return extractDateTime(v1, functionProperties) - .compareTo(extractDateTime(v2, functionProperties)); + return extractTimestamp(v1, functionProperties) + .compareTo(extractTimestamp(v2, functionProperties)); } return compare(v1, v2); } @@ -67,8 +67,6 @@ public static int compare(ExprValue v1, ExprValue v2) { return v1.timeValue().compareTo(v2.timeValue()); case DATE: return v1.dateValue().compareTo(v2.dateValue()); - case DATETIME: - return v1.datetimeValue().compareTo(v2.datetimeValue()); case TIMESTAMP: return v1.timestampValue().compareTo(v2.timestampValue()); default: diff --git a/docs/dev/img/type-hierarchy-tree-final.png b/docs/dev/img/type-hierarchy-tree-final.png new file mode 100644 index 0000000000000000000000000000000000000000..883e581efbdbe3fc0f9b0a8902fa6187cb44988a GIT binary patch literal 30902 zcmeFa2UL?;_dXg#jU$f1GJuK!D>~9F2tsH!EFh>Th(fTUAc9H>B?%f)7(`SYP?4e{ zB1jVfr6eFt2PuLvLWCd@kWgY0LLec@J@1=<=*)b-|NY;)?z;D`Z(Vtsc zd{S9-#O3-oWyg?BxAlf9Uq1dteOigyi)jnf12JTiUr#TzwkTf`bv?}Vm;DaZw;7lF zQUXtl?+G>Iu@mxzOUbJzduT+fmf%b&c<|X{}OUC?KsoH*+L!H-r9M+G}j_YuGKrzSP1)Uahp;3`+R} zUz&%g=-gKM(Lbj@*K{ceBXoCB>-Q^t_S*mOtg)n!{v4lBU?zCCii!^|=zlNWhjqxP z;eWuJ(uF44PW~nNlvQqWv9K(%QR=XX_$sWAke;LTtPgP5l$W$ix|m1~VfcuhVtYM> zfh=B$=7V6Px%R&Y1;0z0DsO_w#!KYlseY zVKNCL`NgReI;*_n`{92l#L~5cyn9)yEgfZC$3yK}&Y85%-X{e`qYGoI)3ToQGX7jE z7wDwcl+b~~Ej}e)pRohoN*%fd2llu6JztAz99l1#nP)>gDEVP88u|;v2%;`()IOyS zjmN~7?7+A!0hT30r!TArFX8tFP>5GrQhIfy&Ks7#T8T 
zO(9FT5>M^-9-g%3F2>>JwB8Ki_q9EBNZf_hyW5nSf3SY5V?mfpabkX`fa}QL5L1tE z^o`S166W_F>SBAp%R1HSi@WvT+yB3<-cIe_6ZvKPQ6bYdm@kl)yIpfL-=;y=HC(=7 zHmo=67XYn&C5;o4vw#^D+%rftGcv>L;ZGNRy4^RtY2=^^P_Au>;&KxyV=k((x>yPP zy?qh%QH0}8=-MJa%mx|+5t0X8L4>Q${|kQQIy+ON&J6FpPE@%1?MHJm4{NU>74=r< zvel|SJ`B_$pZCUyVlN-QYAlHaqIJn(VRmucOs5&;`ZLPa&Z^fEu0KA32*I&PF=A#@ z{Q_~6twy4xKbKU|By|(!9=>WY6y+xMm9!BHS|s>#{i=B)tH;v5z93E=wr=lQgayVI z^Ez%&d&;F;oW3ZqZOX$Jl2?6uc~ffM^=xmOa@AFOa){e3=;-SNyQzuhxH@(y><&XJ zlfYc8FXHsIIUy^65wirlyWA3^7K5{x3+$aHxZ%;7=ovdPn$v-4R7N2}wkxA9FGDMb z;t;EWVk%u4hAf|r3fqVYQ3Ge;I=g~&I2^fo5!zw~xas=8bd+URC_Yd|X-)-4bbIoD zU;BS%ll7mwVKD)Vr(EpZT{4yNmebzaS<`4_lu&YuJ?ZO>?+3TKPa~f2P~yTi9`d^9 zQe%h=Tne3=o!(sTa??2|wN29%_=3RL8%H*gS2nDL4&qW^&UM3{JGfFu-E%`1a%Z|4 z!KcMPvdF7(Ehi+$$5d2$011Qwezr{Z?CrvP`1YrYKsJELY%FmF;%1F;wT`PZ^ZuYP z!xoa9`!4Ig*aR*|>*xErX&Ch)E^EmqGG8_kdRi{(o-cw4%TcQ8%z)?wAS?}j@D$W# zQ+Oqt!nBxrXxOV(vSBgsu*#Jk&B!|A5DqMP?<1}hZ?6a9l`4A_(hM@$Rjv_>1m^p= z!zfCy{}53^X%#U&WB*(O{6#O*8`zopK$zvyXsHc3kU>lB74(r zmt)EF&LZHY+v2@%Lc*>bfwNoB3Ax{|5ufm$cP92mW}?@0gnp(Fu?&)W&6p=uB#lqS z!oesqH(U@?O^TzDZ)B|zw11%LC5z3AefetxXfK6kZ7)aO*PzZg2l;XcEn5rOGHVPp8j4*+rZVQ)o zC=7ZvL3as(c=ZHj4`0tC%_TVfs7aA)Q8z17?WDL(^aIFn970U@k-guWrTFLg`|nBX zNHN$$IKq&n?*abVPWaJsM_<{a*I6X_qbX=qOI2DZ9zSF`KfTU)TVVyMGe>vpW}qWd z_Q|*DiJ7B(n_}xQ+gKJ2Do7QApsXd7ycXqZR3m>X-ON?~6s@D|)hp>n=@jN-4&rid zuQ_cy4JVlW`*9;lKJkF?&5~2vUkjgdhnPuEmmC>lt z7aKhnaFQf!O^4utJi&r@mzZUb)3U@Fm7#weR=ZzSM8&Nk)=Vlr1KZAuHB?-wpEU+O zJc66{d$lPC5z@N&y8t5wff07q3Wx0Kky)s}fTqC%h52#&gga!R0rZmJO~`C(s&JA( z@Y;X`!^AKB?=Z;!NA_k}CCMkmfi=u%#fWRHvJ%Ybeg_sY4bvB)5eJu{5!unOAC!}< zSiGm&eE5ohlIRkF9cA@U&J-0VajbYXht~jaOQ)m&Sh7BKnOk&-czEc^pwlOFB&&G3>XjsSYBNBW$&+<9#U3p&5TkA(Ul>`pyS)%fT4(8GR24^TQC?EXd zwxpX$O$-wQH(*22&~lcJd3jCC(1X2Y8R^(Xu~?$!lNOIrJN?ci&Pro-I5iEoR#Sv8X{N&Ysn+u?Vu^A%&9 zm!p^VZnUJ6SrVcS{*cSJg4PqCJ=`mPxeUTn`8fc8)H)W|kjTR|oQtuHtEnQ%batGE zUR8)Mr#(K-!932qzAE>(mb!b8XN)k}F86`Ue8Q39Cr2%4l>$3AbTXcwNSkuS28${m zzmoF9`Rah*Z{c*yL4%ecB;LWX^pt!mD$Uam~H!Y+KMVjUq-9ZXIw;8cV9FJ`Aa)2 
zrI&^A$V4+Uv3N8=2aRZ;e0Nb{|8Vgf)MZ+Jn62&{l**Z04sn+wKOmh>c8W|a0U%RE zO{|xYp;AGT06eZQdHl15iNIwgeFY1AG$p|4>0^!GUKe$NA#P^gccu3CBChKmBnN-C zTH-`m&PgId2mi!HU*^K5oPi9FrZFeU{8WyM&eMbS(@u&^C&fl0J(WfyDoDwN{Udso zhkjE{<81quT5qk462cP4nzE@a*$vq37T7SQSYW<7l(=Hm(^Xo)XSX|z>gf*X%}22i zIV^7S%qbB^Ip-Tukn*!;kB>o7F@5OU`y)`Q1W$tO67Njs7CJ*LaOhPGlfAF%SI=L`r2c$C_fYmb|PTnQVp zJ>;*$U6*dVkk&2jr7i_g1<d7|@S6ToN0v{EQ7VQ+s{r{2QyJINVLVFF#3GO0ley`3oq+u3* z3D$HnaB_#vBp%veMCKh5-uBO_l0-Inywh04332v@qAAq1T|}paMH1GQ#T%j9QoG&p z{8L^uo7mHGg-3NVec{Vj>dg$g!F(tj_H6Ziu#=7ND4PjX@iKJ9Yy~~%=vNcnF!H6~Ii;WXo^zPGIpCS#LUmdV`9S1W*RT^EoOaWrhE-XT_#j?@5d`+( zj3gS;uaeQ6LUBQd`Tib+F|h_{fh1cVPM?T>#QZQYYf&g^9k*-g^M$?dtJj>8rV+0O z;EH+gz3erJpZu)zPd0Jh6&@{mvW<*+t+n~r4h0`A zw2#|N&)KN2dp$6z)px_PJvgqdXAhWV-c1ZWfVaMJxIbz`7}DBByc_~m6cZg6SCImb zG_MOdv<~faph53YcQE%LKi~tNz(EwwK^3mOvOXL+c*+yZZs+EYdARHL)oY_JUEm#! zSO2Y0d(?2DhVRQLVBZ!x5P99Q*@ba5pQuVtO*)Y9)V+84?R;DV|LSFyapW)0z_|#p zkssdJf*q(J4ZOW?qr_>)KCZ6P&)+cf8k>w860-DF7QdQVO{%&RS5xied@XlJ7?Khs zw3YVLBsD?ijs#QRc{G$;Vx50(V_Sc8b)(+AGezPh%&MNe;I;XWBq{X(geLfX!sHYiqA6GrtZ# z)|2El5sQO3?e8>1huR9f`*OL}n~JJPox7ch{ZX4XbMKf33G|+m*BeKf)OQxS`+8$~ z(Rp3W0lm(+XTS0lx^+40`#4>msdgaB`XxySM~<6DXsejT9IUt^w-t}$JH zIj8lkFwQM2>&#K<*TH_4-xaOgAR>WQk8w1rJEg8frQiZN!ydb7xazn+f z5g?%7g#mv~3I{noj9pRHd}gyhSNG%HT@+r@d@83GGrzCZ?B&ffv9HRmRgrjh-V$cq zPO0X3GO?0l`wJ&2aA4DD9qD=Ijx1Y_YYoe5wD^q*Rj;KEY1(&VF>UY<@6!BtwI-tGukCJ!LK`hL}IFhLc3DOI#-MJ|BX_1SM0QBL_(X-mNd%(T%1^c0gEta|$@vW3U;1gKuBe9zH zb5Kbp9O%z>pr?E>U+4eozV{lk=3&3l32ILLgMGASpWyd?!TAFo;VX7vlC*f+UjSbL zeHsHn5D&xWqtq@#SfQnVhh;R*BC*k{gtBd2s?BP2wrNM%wRF{N`NNhCC&y4kpi|ZX zIB_ZZmJR0PS^lK;UX9UzLcXc1eTQXph2mle682fI_15LDB1cQiNq~54gCNOlCTq*- zwq&hUU>6bVeurU#S$(ZW0l@@!Z~kibktxtBG#5j^fL@&gUTq4)!5|?H-W76c7V0h- zel~hw|5od>0<-MRn>R;$ zwg50!KN7?9U_8_VtK$WiV zgL$o&n)q0C-xH?)t^qNzWk_|MWZ$hR2l;tn$SLUwL=Iy4W_TqlAZxQc^o@rp$JHTn zE^2gkQ+m?9kxxE5Blcd$5uxSpI{Ls8Os1y%0D5hK0 z#B(`(T(@LS;^qUr;Yib2D7CwAMrD@0(&zGGO*3G3WG5wmW%#oXb5Mb65-+9SFbq=d zoKlH{(^XYJVAZv^n=aO7Eab%yxS&4{NHuKH#;-iV04mfpzO 
z*QR|fuk9L0%{V>BU3ZCEc8wwIH|^5+up}+>KR~Ke$fGhsG2I__RHgWivLN<8(d9M|7akW1gg>w%!)(&Q4kCD@Euvhd$T@?t%YsGp~@!r zT(R-8tI^TX*w=bUwaPM1-=o}@xS>65mD6EewAqV1(#7=Td9GZt(BTog#^)Z@FW{Gl z#vi1Wqvu>{_ZDUYXj^L!X2k(ve{N5Zxp;j`v65O@TV!-K;9VSsyvvKweYe06YRpYM z&3FLav3@L__(-y-T3$q8x=Q-4$p1iuyXPWDZIh z1yYS6APNQQC(H^H_V9~z_ftdp5<3(+EJ1Qn-vF950;Gag+t-(`qVm2C}9=_3z~x}pUHZyu|u z$GGNUVeJ@SC4&YOr)~t=ny3d~-w+taTZKtE(FoVZrM|oyC zM^oo@FEKC6jO--cfDJxF?T~(cB|Xh);|E3*5`Oh`(|B4$0JaSpEStD4XrwRbtje0< zt&3$R{KFn^i9&Wi9_=RfVEz3{(`sS_{v|H`i?N%Y;fBfzHD`t5^q-b6qfw6x>pIVV zJ0hXgF`MH&W7!O&k{CGo*$^+2HI$5wQ*$n-YDm0~^#*be4UPsCr}`XzVfY>q3A-;vy#+UCs|s-e58$*(Sgct` z;Mtj5+xncp;Idi7m8q&-H4)s-wbNAyQVZsgb5`u3L66joqIzKUKRXZiDY`8z1+W~f z%YQ!G*PQlIt6yRW7*}eP+EdZlOO;y)wBBW^;`#1M@9y)t+bT$B#}l|RSLLUp)IEoB z#PBJe_DZ~}s9&FNxsbmWS?Hqo@+-}54(ekv4^FaL0`ENhA4GRIvn-|3pALWSSOAgM z>)hp$8jkeI>49hc?O$e{%1Ls0?LQnpsXKzVZ@246p=wFn7V>3a7F8F(`P&&XHy6?p z5g*hrDAPMKbr3NhmA1uSs5jF=;x+XR*LVMx3&%Kx&RGS+-NBqC`<<;YN$IjoQq>Qq z2sA>lfm`vwN57h`Xm11V3HF03MF0w4_2`7xw>xZXeM`{UP4Y)76e~y^9SAg3RExrm z30a52M17{lfSjOS#MUL_Li3aI!o6@k3v$;z0A+1#K7!sTGw|YoTBgnmi-+C8U+VG%~G-@?{ZcpB{)4)gh zb2h3m$oFnm0$0XyuK`w8hzXa1Hi*FyCC9Gg&XC_Wb$E zs2A5+0k}tj=r;UNWwaJBhlkC6i9pU+(P(J_yHn>arZ>U&{gwr$ea-h%1a6X#vK?Nh z!?H~M`m5*gLYINzX4G%89pC%K3n$xSL6%k@nXC_<1P<~q6^gp$<7&->6S57dK&354 z&wZ<~a#k|-tB-eZ_Xf(xQ)^*|9OU3*KaXFvc$`HtDTL)Q3pKqfu}Qq^?M{wK^lDYp&P$P~k&{CmFu zOJ-D8Y(;8X)+WpfNB*dZ=5e1f_Y0iHG?^J}T#GaVDVXEuEp|*sR_Dgnb#*lF1UL8g zrnm~l9hzthO*f==N12`aaK&Quty&FW$#2tMy=Pqv2sbCr!#J?A3M}byBGY={on(jRl#4B9G1>~vC~^q#~);;Y2>tRRJoz82qRpQF4<>-8q?woJRR48H+L5}fnw)o}i>ND@{mibTep zq5PF1%E%~mq0t3fK(GzY*v1t7{^;J#D-3}DIr2V(<+n;PsL;iCq#jJEzjB{o^@q_y zAU}|f+b*t(mmysbsz{-vNFMR5qrc`i0<&y{t?{D$$yI06u9i%PMBi+d2l^t2LwZz= z7pgM?>h%0t-m14e2RJ7>eeuR89M{j*6(8vPMOd3yaeIwJR#st@4UacB40-4>a98;Y z{6J6-?aO?WN&!uL=vYQ?*&A+Zrg_wybU4|VNIz||;7GuGL0ncoH}I$iaCbM`)grG1 zxERU6)#o1IRW-BjzsFr4dZe-P=^4>$z11$GITRbg4S$;$T;c#zk__pR&&#vyD#mY>LvlS3SfN9XfyFNQqZ~c3Zvus1?y~=%8m-L?*N8%42Cw 
zutNsGK(QW0&a|9hX}U@VU4p6kL@emOAb3J3^?)%4Kp+1d2Cn#2WG1mo6NR4elyCo*$i$ z-^(|Yw3V0-{VDAm0JC^v^HA7k^r;BXbtI?nM_LKY+WhFc5Bnt!I!*=RfS8xkGxdt> zz2d3yhVx2CRgW%A3|`^2<^hN>0^cR*9`x1Bj>KhudpJ2mkkgO5?gK6(z>IEd48K*~ z(NVU%!?Ni;8Wf|{kpkAeHjEn#;;#pk_0dE4d(sCLLP_DL>RbEjOBeyAp#gF!Sn=1~ z4$GS}=QbWs*JW&l_&|1%R)h_E@h4ub2d<~RrK)~f!!@up)gR8%xKzu9T z_EUi7wHJCA#dbrZVkF&D06j%1#B z!ewLgwm+U9Fy{|oL2Smyc8{utVr{+in(UH7!1$4QI*m`Zf?3E75^S#au8g`OU;zT+ zp{;xj23ul9Ci{!N?11sKc=sg0eCY4@z!b#!PJT z-bk71m6_u2CN?$?+7ihU`kF*|HRJRAZfe`t09*7YaCQZ7@~-IP<&l3M4t6@BqkwfK zyq1ZE6!!N1Z4I#g`*|MACIX5ls4m{CU8+VSO?b^8e?*jO5Vm~ohfY$TNmhGIRySH3) z&lXFH1d>LLfKx*cV9g zBPsp@T9ZKHMm{IpPw#wm)eGNykjAZ1*g6(z9yc;76mV=c>J#eXDxV|-aYB@M5Mlz& zep2W*uM_~3mAve7E|meZDK|ST4cb=u1`pJ?;@NjpcQHh*3<@JKV9_Q_`$otgW|?yN za-i|Bd=!{H55@6sn7c0nYj?0GfJ|UYq61Vfgd_W&%f%9;c`^HKxL11SJ}*`R^8*BY zOMjIb8+6o~FeC`cpb!_n<0eopIN>X{rpQ%B@%M4d`SgXef`kJx7Zm8q1a*!n2#}S_ zC&Gk4yrQh;Rggel;NvNon`=D?bNz!1ccCZV+vxQUVC_>m5C(4q<+I-qj~$dR=N}$X`-j+$4PTcH|>;)=^#)x{VM3NLk0{hN-m%`gIlON0^xhKa7aYp$KRGSYQAJUn5weO3_` z|3wtk3Y?dbvUep>-EoShJ17k+Yu^!anof-GIE?OPPXQ5!VRy9kmEET8UMAO@&E}XM z-A2*wY6p4K8bSVcDMEtDTZaEd@zkF`tZFPOwY1i=9=!S}u;14NX85OyA2!$Y z1nJ2W;c_v6LACtk8VD!(8Wo<9;{zD99&@HHgOl6x6NK&B!A2PM83a?PTidfbbNR)r zPG|kwEQ|AE7-T5}AkqPC!vM@*nrQ0?7+ceE@`qSVLPKe#W3`sXUvc`kBLSX*2>r)r zgP>K;6F!tZb%F`(fy5VCMx_^z<0AbYbu-BLG8b$HZaK#{qS+ z@`1tB`tq1Oh}@*OVxw`xX#zad;=c_Qn2m#8n54dPu04Y%JueosOXmQ3`OmKmY%qfM z&+Zl|@S*Vp5(^8Chr9Z6%_e0yaZx|AZNejc*msA#w)2o;&aVYxqL}*;j|6zQUT`d;b>aPw8pD z+sg~2{fQgI8ZEkd$L$$X+b^+-S2s?a1t10psup+!m&_;HMMm$bj&BlC82OY-yLHfQ z)*8H;`~HA@h)!QVvgvVnwVkPetwTodm@oZSz-HJ+)8D20Qy5SZKqw&=L6j||S|~s< zD$hlaU2v03`*iC_bRh zmO@^~_tOJu$MfGD^Y1e+gqy`QGA(MP1p^W?*VSqK_v*LEgofmeRZiD`Sm< z>VJLW@(+MmX2170$MJdoFoCf91>I%;I?Pl~hzoGz<^VrPtO3QBV=5Vf(@T`09PF)H zk@*P1vc`VaUNFUvb{BC$Ld=-LFE~W&v^W4^qtm6<;APGZvz5 zj_6exn~m7D9osBBI>@h{AVKhmS8*2V|N7gFP%_(Y{ojP=r;Pzl%Xdxl^LG4b3xC{) zf0)p}zY;=~`REZL;J&O1p!pf$w4u3yJ#CXhuF&`U4{ly}d(P=i)g&N6QTw7tKf~(X 
z`JZ>owR(#wE*MJuZ~+C3E%~}C5toIg4$0A!^n0SR+H_kz7xt(SW_CUL+z&X*`&(%c zC+z*mg&igA#e%)gQfg6-ux^ zHw2N#I3*7yZO-=CHopVUSS4M#U*<;s-hJYaJUvGPa-!~jtAIr(&@;AD2TnSVKN!pZZ5nv@pT&`PWfx*27 zLuXcyYKpyR4-r5iy?Od^Z~m=>(PgXu<3VWTC%^EwoufMndUS*Y66ewIp^shge|{h+i3kd4-L0ajhj=O}lpSXn-mGMc@=LQv$qoGTHo!ajM|I`)iqjsvXtzdtL1eyKwk}#biWS(c6kemN zUI9;Nt{Q5-AV(d^>KBp=t4KZ!e$ZA^6$bXfY(R9F;E7e(nWC~lepSxN$Xht)8q%X4 z1m%e~E@ol=U1giVwVi$yxeEM@uEa>^)l&`7+U`Nb@EtNpYg|2T@~YpIOsdw6=*#bY z>NKo(s36Dn#c9lo)01EH^0(_H-#urQY{VdhGIbA8;=4uz)-N~61ckSp(Yb3b9_g^e>r+^zMV_@r?vq-+oSc3@t-1&7KKktJrfu{v0CPjzgC;)~mIE(om#U~Q{qUKmuE z=^<4;v!u&N+%qcTRyH3dOrK}D8gPuH$Gt|{i;OiBrF))rDvI|hh3peVXJT6&x)C`~ z$phSu``0GdB-)Nk{Fevp1#b$$Ci!{BCR$a*WSyeYX59b?AFT4HAnz`KH%!?!?&ZqkIo_fQd3^+k;Iu=YB9BzTY=$%vAT>coZA^Cqg24Z1H^#?O zR=NIkAMAk(o>!u~Lrwanu2o8vO3Yu1 z1d=|6u{esZt9KYAc6+H-oRHmPVHLE?m)kllecOlkIALOpB_*IPme_N_v*_t9P&7_3 zpr}t>Jr-zPP3`B~{WWX?G9--0Y_YUFks@N|*X*1p7-$VzPEiMVfDAH#WB_=pIJ?4; zh#~+)uGk#7)`lVKsWo6v+B)XJ%eIb`@D}?CASM=z;O+r32S~p2rgtbW0H5u`M!3&_ z%r?%o0z!HLR~)cKnrT0@DXC27-*@(seYM~hG|dmF7@1*fP+Bmb$wh5 zuVV1+=K6+4%88W3M=ofjl|R9>L?zvvk{8_E{6cgmSQWA8`%rYiMSdK3fdY}B0lJW! 
ztJIZMq!bB=(s7D1L;|`i>LBLJ{9^u02O?Tl@D~UY-^M|2QDbTB{%Ur@t1E8OrNdAR zH1U$geJBi2wT8feLw!)Rjo^&ER(TWtQ=CLWhkdbU^}Dq(128!v?9KQU0DE;2Oh$${ zSXe^_QLd=7=KwI`l#Fh2{}J}bBg>X zjy<~>OE(0KU-z2cWKX#bl5lI^ zRm1rhI8z&daXkd#X$O#f7ll)xJYl#Qlz@zqcDS z1W8D)mI{?}ZGj}4E*Cn&$bi0%?DFNXpc{O36UJ3b<}5593uu(BKqrAZW3@=YXwsK2 z_y=+`$VC9XpbZ!0yF#a3HolE%BklV2N>|3v1m&Gf_$@moyhT~YtC@f~DBHp{jaAFg zkTS@D5s)w_kd~>BF6hdF)@x(8^laJ}i<#F3Fl)`XVG{5V$|O7XF#%NGZBT&T&&;@4 zO(QVdP;@}`J~7g%KObff%H)v;c!jMYdb$4h2h4D)_g=9cBFBh(i6AS z0-#o45a&bI4uZEJgCnaE8}L_UKd$j?>V8cVzY$P;`pZGUp2wzEiaZveK=tiba^Q*y z7k#X5KnAVPJr2nAGpBfh_)tiWQRQ|YcCDNwJ;qjy+9#0KA`23Pkt}8u4X=aB3hJjb zL>*RUxBahx-auncHwnrgPWb~_@4&v1#fFMh067=r=2k637vp)X>c&^ZeVVC?8|mFo zyhL@i>51`IE*Q80wc~wu%n^Z#4^)<+8|HwT@}Km{?v)(ZCSR6@+eszGk$j$pL#m9I z`biUAS~=z)jjagOkV5PSUE4vR+OMWwf=X;C-tk-2j2MXzU!`POVL~E6wH1uY{;9qbAkjpcGwWjPk-(ly!PI3WX;jD zHz=LBh8KyQB9}153=qzRM9}}{4vf`!fPzzS7p#||ub%BH+c@pyy~yikSm9k(?ktsX zq(veWpQGeu8Af1Z!1V@ITFxqip0ope^L6*wUN6l#h_BI*9UYUd0wPAV%Yai1dj@7P zvpX#JIFpBn+fSO9+&3$CrJfYs15G5fAgB*Mwk>}k|Jh5>*trta^d#nM7;KASAKg1S zwfq%V{KKrkc>(=H-cw>u9&;MZV)k@cK65X_whmD{>vLAFznAzVdoEyACE3Cn3EDHI zxu{)@BBspEcMXJ4lO33Cv;%1BB*~2=J1P0%qh3=ka|V@cCFu@5?n#@43NZs|N(@@I zZ6P{q-c`>wMK|cl_&HxVveFiX(>KO+^B8yvGX}?(8jj4~GNvJaq^|ta5Pcf8F1vUe zVY>VG4OaZP7@YLDE&^410hlq_lld3_qvn!d^o8p2l69t_3`pTEarFnzn=9PqOW+Ee z@7Kz#U;HK-3uhjSn?b9A3Hkxb?N2bWgFV7Xku4ewi76rL&Ysl<#JVD1>UI;yuurJ9S^C6Grt3o`NY zHa5pqM~bx5D4DFGG0e@ZAdYGycSNxFDSh?<91vO00B4mCHEqIAR$2B1(t)%r z&jU1^T_XP*tckFcWt#{2{)dL=J4_ANh3YGz>Q9(EF>!yNfEw7NHk-qgQFW?}Rc8+N z6Ahe)NP5_kw&RCO`Y`Cz!#Nk6XrNH^{35-!^r#+>;A4vz-Hg>l$yj#_U2CYQE6zg3 z;+SX`5kP2*_xAIwuG3}=aKONx8UxS3Y(vGkN)D(I!d>;0mc!|MIYW3NDG z?-ti48vbZN0*7(;>KYS&JsQUZl%NQ{@nNQ8*tFdpmL(xBW+tv2tU&KswoCqS#BaT# z8dcK=J28%jzCG^W=Vk~N-^VrZo3#QBI3(Aw42TN|NO>$TAxN!{8^PA*i_3NFD@aSl zw0i#8dxtNn0V``RS(Qtiy$GG$&&rS7D=ixBCBV!l+YB-#wD&n7!&#-PM#`di$5q`y z&__*l2X+c_i`i=)0I+2bF&7ZQ@&P>~hn3WwUE`Jhh&X<&=AiC2uC4@SVh;6^N}7JY z=`Lcficfb<6xai!=&Ex zmWMi_pP&ALOn+dhUMb2(J$$Cw5}TfiWN 
z-CxLcoL+w58Zr+4a4Or4obAFhSN-A~ukxYCn2KKy7ftym>)Y@u+dk)y9@WF#j-HI7*P=T##_|!blO2w3w zdoZakMGjnz<`|eO`e)@h%Ll4jTzzF@F!WW6VjGk&tAb@FFwVmp^?2@RxQ(fwm+Grp{agKOR(X{t2*M1k4a3EdVn^?-PttpV~qUYP-^`WK<>y&EH9Es_pE z-|sKbcp!&W&abF|OF2KDr7DVo4Sq4%cTN*P2nlyImZBjNPnoP8Az<)?48swE$pFM9 z=($iEH^WsDk#o(~?3UqSIZXs~Nrv_n=t4udglinv0aaLI4lY#VGLGDUG9dZe;fky| zD=xuJBSBS(a^jfP0tHtfJO)p~#dQB$FVL~W;K+B^x{Rr)3dI$mFl;q|WMwbPU%?4U zfQxNl?WuSiVvM;DVcsX*E0A;){x;%2tgWN2rs7Sqb zCeImRIvdsUD0}3!sIl;@kMR6qbd`KV_{oqXwpVvv`ABaj^CJlA==EV``e|FZJKY#T zxv^Z`-@9{4diAq5!iBQH{MZ)CTlc(zQH-g%{>(3&LDN`(bR2{0q!li~KsC6J70}qA zw__p9chLN!&Q0U?-bFKHlTO!agE}NARyB^mFwO;8X|gixIk;ALR#p_rUUW*eoy<%nF^ed-3?t^OL>r~_mq7)0XQ4I{Ah-o3D0|IB?HN`x$-SSSDrzRd zCCZ3bfQsAtJVg8KD0DS$wj~mJ3mC(ypp;9Pnjj)3n03XLpB#$Xg?*B6#lOkP5P(Ra zSoYxb$=%2{rSbzv3wuz$tRK742(C`-!6xop2vDm)^AdCM6NdppNkFEf{AbZ-^WJLjDe7y-epmg(OvmbMkzwvjn~RFEv2Xa55NuIQ8@e*+A<5;C!ad zj42*cikTVHbm4s4rQMsukyOD&#ituPLH@Q>MONIlJJxYVYy$~@2u z2=oAup-ESPcJ7zeDbC@9+=O)o2!mb0Z%@nX?!u6_tf5>9D3Mn<3W~0SWHg%1pkL3J z7XTnF1U;7+EHvpc6*pC{mHsve;!JSCfBNK}b*U;KjRrAV+bXA}b+?az#=5lh?#~bv z_NS*2UpHga_NQG(TIK!>SHZ158^S?7BBaV|HFyu3Jpxq6UGmb8pc9iZz45R&f@z?x&khVjF?2>gdlKmdG-#yjMfIRNu4X+Iz2J}^tp-gN4!_S%_nAhno z8NTPTww4IcCLF3#4}!oX(C$UFGa!1_qwd9^d?Ns=)Pl<*fSK;Flv!1yCD37o^lvaq z5Q|50$RYY^KUhnUNSip1j|uMY0&ia9XHoQd3C4@-6-_MV{WcM9u;GxO_s3s;egS;m zrMo~$ugqyIYu2L`@;=?BeB2HlG=dHn#$JdA)xhuzH7|}?KLb#`HZB@Szl&oHJSUU@ zOqDe%kO|d=kifrNcaT(vAY_x@7cN^?gzw6g?+sVVi`?(*p!%pT<^6e;R9`CpwUxaC5nA?UxJY}3w0>xcCMmig38;~l(LJ+f=t5U*6v+x z3-O~+9r6-!Ys(o2M)#F==YP9c5Dr!S40xiMA` z>npxLvl!Z|3pnqWf*8*dt04|LhJ9YboKCHtdr@QLU8vBkUa`E<_wqlC_YBuC@@?Hm zD_gG&okQIlp^X7^^`%7J`}JM*1%Vw5U!(C}FW-t^ed(2Aexho< z*^AfR!IbaPGz+z&c{AuQk>p1`Fr|j;nU6;8kDsZHE*0gfp~lK^d+BBj+VTBbzqt!-&d#2?cjSgF1+2%=4Rsf!30SU zv$+W<8f8=<(4o(UIqjb<`=_9;E2nArp5wq;;%Xp&d$5{^bB54Mp~%nHTk-h8V_Zb@h754n9XB;q+zXpA~-_jOE!lJyN9l zB)#{e+c3?CxjnYhC}Ph5_KiO^E^~Zi)-%dm>NK~tw3)=R&u>^WV}!gC^aF%6u#dOD z_c{{{M7aiYvssFMi`u97v#?@t_G|tOQD8JV^r>=D6m6S*O2K9ny!T4|BimT*Yqj5j@z9P 
zpUA(?d{roVO8H=uc>bW=#6dm3mH{e}uzd}-e^!lj&7WSmT>Dlh{I^1%A}cf6-K^dl zihV*0e5)^=+!orKAubvM<;QCQ2QEf%;m-&`O)UL@Lh08gyff?*p|4dv#7QKtKrED9gI{jTsyWpM)E203g5d z{0-^re2l2$%^aH7C*m5op|dO@1HNnokR3{gAaRt8I$|x>4RU4VBYb@WI(XVPfo@qpRcidrA`T|~fxHEpiu}($nY_aV z0Qlj#0&P9f2m`eVLzN%{IB_1~IEy#11F=~Xtq+`}d^hRvpnqeK@NtxzZMXAPkR|!s zBU)QX?w^HJh*t@~{G`B{2@G>Hd`TcW^NaIoK!g2ZK=|rCnjSDgaZGeP5%Ew>KHkC})80lxr|1SKAT0!*yulrCKuB12apT1|XyC5X~80cRa}U9kHI#CJkIF@bPT z2XK>R4vK7;A3dXg{#i7r@9w2wg2jV+k7yklB>_UDXKd`6fmR+P zWh00-tm%R)yWitL{1GC6NO6MhYT=|;sZflzU22gaqXB?sMsSz~s0fC%Xz-O1A%aDq zYYa2p=iZUqTLZdt@~@mI9mtmTL1A6Mw>`O8z5!HDz8%%% z;D++Xs$vwzpZUA9cWJlmD>1Uc_tikHPnMddf)1i`d;-xo6As)=x1-!4Qj84EKopfj z6W?UpNV+?YeSCk%U{cCB0|xQ)uyEN7S-{4cW5nQlIn+QwTeitilr2%KB*(;xfF17x*-LY#S*RW9J)A*PRgkAVlccDxPfU{m&_B-aUC7VyO) zQ2SFwgi5WnJNjWW)aOS50eEDf;Xd@$E9c;t2K=9O9kBBz)FVT!Fud>p2Spt~Xc$8~ zTfng&g^K)T=)t_*fw~ZO_dMhUT*d@{3B*~bQC_3RSvIGnsg~xC;sq@ z@rq@{?FlhHpfoKK`#%8)n}lv3@+21$El7XMzwRWY^{}j|4*=JS_b$6~9%99V-ea6T z5Fvs(4CpIS#-I6n`uUfKFiMAT^&bzSo5qP$;G|^%^iA}8(38w=8sHCs`{Q>-7O2Za zLdJvo{*E5Yc=K$)A9(5RBOV#`DaogM&42XCeDh-X>sWq}`^QMX{@d8@I;k-ud7p4u zNc=#$=Ui`TdLJOaVh^%&%TviK$v4<1mI){>p9RvWv?xKAMuOxVxyTB(vOP9}HAf)` zdgRRhbJE_IQodiO1kkBQh0TJ_c&s@k3ZfM`96SY;OF{T4o)8x?LqH+Ckn+pLSsDXG z@kfp*>sl=V$ogZE?Rm>yypP(cYp?xZ-V|XojO%_w3GGZ%iD#s^p$~;;flG8=8d@Zc z2#($_GTfC_n?*Y(vUzw(gnblvSX!P-s;ar#95+6fG!d~#oF88$4mMl`ruIo1Xo8o5 z%mlH^p>4P_mRn zNe}_q^F{31?XIocpFMwlzt5NN^L+R|&%^WObDz)qRsSXm&fNf~JqImkI}{<7YsHf~ zB^Fdb9x#>;>Ny79p&*D26I&pt1qjoFl1^RG4=Q37UIW_`e2$t5>#|gtt6ayNprxPS zlo$8&G&>!ZI3fnvtaf+=q!A zN*7iDBd3xXwgP{v79CSQUE?M5Q^38kHK6yfwC6YOIz+Y=#(|p48<7rL4mdn=8Ms=3 zG7u=gjV?Lds}OXccdx?_3o3mib!)^4Fc@o03UImI5sU-hqecb85k=srr$)K{!O$72 zvmuD!P)(+NXk+C@W6@AO3riSkG);7)9vP_;EGr!F%it9 zyH_J_14n6QMbH$P_LiccdgTI zHTYqJf`!b=dl?QlkWC@IB^Diompg!1CiA09BpeYEF(ItPjN{!#$7OZ$Pnh0E&M^z^ zABZub!jF^zCcfXZZ;%f(5IeSP*v<@TH1dCJ<5`0%3@plB!wHqSC5gh!qsIYjxyu76WL~IS83- zd0pRpmkVjil(Asdi}^v;5>kP3Z6j;{X8qVFFKnT-$k1EV|;H(Y^WA 
z2rhx)uWRONikG`2qgOO)Nwxx6>Us_x_p4EEL9F&s1^d8PE=E6>=KE(luv>JAQs%q5{EJzA+tg7gyl;q?&ubZP}iga%ur4xim$f=7GU5Dse&_fAJ!F+twnzvRT|rZ>0?xI=m}^`}c2S zcOR!Ucy?w^Ho1^q^&CzCUP0LoSuR84jn!H_H`K=4JvMuQC<;>(y%oPBv~y{cSW6e- z;Vo8o0h-)&93~c*+%{x4V8=W6J$ZpZx-SQ&dFEdu{I~fB6@}Xh8q~ literal 0 HcmV?d00001 diff --git a/docs/user/dql/expressions.rst b/docs/user/dql/expressions.rst index af264b2f16..123bba046a 100644 --- a/docs/user/dql/expressions.rst +++ b/docs/user/dql/expressions.rst @@ -168,7 +168,7 @@ Here is an example for different type of comparison operators:: | True | True | False | True | False | False | +---------+----------+---------+----------+----------+---------+ -It is possible to compare datetimes. When comparing different datetime types, for example `DATE` and `TIME`, both converted to `DATETIME`. +It is possible to compare datetimes. When comparing different datetime types, for example `DATE` and `TIME`, both converted to `TIMESTAMP`. The following rule is applied on coversion: a `TIME` applied to today's date; `DATE` is interpreted at midnight. 
See example below:: os> SELECT current_time() > current_date() AS `now.time > today`, typeof(current_time()) AS `now.time.type`, typeof(current_date()) AS `now.date.type`; @@ -184,7 +184,7 @@ The following rule is applied on coversion: a `TIME` applied to today's date; `D +------------------+-----------------+------------+ | now.time = now | now.time.type | now.type | |------------------+-----------------+------------| - | True | TIME | DATETIME | + | True | TIME | TIMESTAMP | +------------------+-----------------+------------+ os> SELECT subtime(now(), current_time()) = current_date() AS `midnight = now.date`, typeof(subtime(now(), current_time())) AS `midnight.type`, typeof(current_date()) AS `now.date.type`; @@ -192,7 +192,7 @@ The following rule is applied on coversion: a `TIME` applied to today's date; `D +-----------------------+-----------------+-----------------+ | midnight = now.date | midnight.type | now.date.type | |-----------------------+-----------------+-----------------| - | True | DATETIME | DATE | + | True | TIMESTAMP | DATE | +-----------------------+-----------------+-----------------+ diff --git a/docs/user/dql/functions.rst b/docs/user/dql/functions.rst index 19260e8bea..5af21df2bf 100644 --- a/docs/user/dql/functions.rst +++ b/docs/user/dql/functions.rst @@ -1145,15 +1145,15 @@ Description Usage: adddate(date, INTERVAL expr unit)/ adddate(date, expr) adds the time interval of second argument to date; adddate(date, days) adds the second argument as integer number of days to date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. 
-Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL/LONG +Argument type: DATE/TIMESTAMP/TIME, INTERVAL/LONG Return type map: -(DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME +(DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP (DATE, LONG) -> DATE -(DATETIME/TIMESTAMP/TIME, LONG) -> DATETIME +(TIMESTAMP/TIME, LONG) -> TIMESTAMP Synonyms: `DATE_ADD`_ when invoked with the INTERVAL form of the second argument. @@ -1178,13 +1178,13 @@ Description Usage: addtime(expr1, expr2) adds expr2 to expr1 and returns the result. If argument is TIME, today's date is used; if argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type map: -(DATE/DATETIME/TIMESTAMP, DATE/DATETIME/TIMESTAMP/TIME) -> DATETIME +(DATE/TIMESTAMP, DATE/TIMESTAMP/TIME) -> TIMESTAMP -(TIME, DATE/DATETIME/TIMESTAMP/TIME) -> TIME +(TIME, DATE/TIMESTAMP/TIME) -> TIME Antonyms: `SUBTIME`_ @@ -1222,7 +1222,7 @@ Example:: | 10:26:12 | +---------------------------+ - os> SELECT ADDTIME(TIMESTAMP('2007-02-28 10:20:30'), DATETIME('2002-03-04 20:40:50')) AS `'2007-02-28 10:20:30' + '20:40:50'` + os> SELECT ADDTIME(TIMESTAMP('2007-02-28 10:20:30'), TIMESTAMP('2002-03-04 20:40:50')) AS `'2007-02-28 10:20:30' + '20:40:50'` fetched rows / total rows = 1/1 +--------------------------------------+ | '2007-02-28 10:20:30' + '20:40:50' | @@ -1237,11 +1237,11 @@ CONVERT_TZ Description >>>>>>>>>>> -Usage: convert_tz(datetime, from_timezone, to_timezone) constructs a datetime object converted from the from_timezone to the to_timezone. +Usage: convert_tz(timestamp, from_timezone, to_timezone) constructs a timestamp object converted from the from_timezone to the to_timezone. 
-Argument type: DATETIME, STRING, STRING +Argument type: TIMESTAMP, STRING, STRING -Return type: DATETIME +Return type: TIMESTAMP Example:: @@ -1262,7 +1262,7 @@ Example:: | 2010-10-09 23:10:10 | +---------------------------------------------------------+ -When the datedate, or either of the two time zone fields are invalid format, then the result is null. In this example any datetime that is not will result in null. +When the datedate, or either of the two time zone fields are invalid format, then the result is null. In this example any timestamp that is not will result in null. Example:: os> SELECT CONVERT_TZ("test", "+01:00", "-10:00") @@ -1273,7 +1273,7 @@ Example:: | null | +------------------------------------------+ -When the datetime, or either of the two time zone fields are invalid format, then the result is null. In this example any timezone that is not <+HH:mm> or <-HH:mm> will result in null. +When the timestamp, or either of the two time zone fields are invalid format, then the result is null. In this example any timezone that is not <+HH:mm> or <-HH:mm> will result in null. Example:: os> SELECT CONVERT_TZ("2010-10-10 10:10:10", "test", "-10:00") @@ -1440,9 +1440,9 @@ DATE Description >>>>>>>>>>> -Usage: date(expr) constructs a date type with the input string expr as a date. If the argument is of date/datetime/timestamp, it extracts the date value part from the expression. +Usage: date(expr) constructs a date type with the input string expr as a date. If the argument is of date/timestamp, it extracts the date value part from the expression. 
-Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: DATE @@ -1463,15 +1463,15 @@ DATETIME Description >>>>>>>>>>> -Usage: datetime(datetime)/ datetime(date, to_timezone) Converts the datetime to a new timezone +Usage: datetime(timestamp)/ datetime(timestamp, to_timezone) Converts the timestamp to a new timezone -Argument type: DATETIME/STRING +Argument type: TIMESTAMP/STRING Return type map: -(DATETIME, STRING) -> DATETIME +(TIMESTAMP, STRING) -> TIMESTAMP -(DATETIME) -> DATETIME +(TIMESTAMP) -> TIMESTAMP Example:: @@ -1560,9 +1560,9 @@ Description Usage: date_add(date, INTERVAL expr unit) adds the interval expr to date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL +Argument type: DATE/TIMESTAMP/TIME, INTERVAL -Return type: DATETIME +Return type: TIMESTAMP Synonyms: `ADDDATE`_ @@ -1663,7 +1663,7 @@ If an argument of type TIME is provided, the local date is used. * - x - x, for any smallcase/uppercase alphabet except [aydmshiHIMYDSEL] -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP, STRING +Argument type: STRING/DATE/TIME/TIMESTAMP, STRING Return type: STRING @@ -1686,9 +1686,9 @@ Description Usage: date_sub(date, INTERVAL expr unit) subtracts the interval expr from date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL +Argument type: DATE/TIMESTAMP/TIME, INTERVAL -Return type: DATETIME +Return type: TIMESTAMP Synonyms: `SUBDATE`_ @@ -1710,7 +1710,7 @@ DATEDIFF Usage: Calculates the difference of date parts of the given values. If the first argument is time, today's date is used. 
-Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type: LONG @@ -1733,7 +1733,7 @@ Description Usage: day(date) extracts the day of the month for date, in the range 1 to 31. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -1758,7 +1758,7 @@ Description Usage: dayname(date) returns the name of the weekday for date, including Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: STRING @@ -1781,7 +1781,7 @@ Description Usage: dayofmonth(date) extracts the day of the month for date, in the range 1 to 31. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -1806,7 +1806,7 @@ Description Usage: day_of_month(date) extracts the day of the month for date, in the range 1 to 31. -Argument type: STRING/DATE/TIME/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -1833,7 +1833,7 @@ Usage: dayofweek(date) returns the weekday index for date (1 = Sunday, 2 = Monda The `day_of_week` function is also provided as an alias. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -1858,7 +1858,7 @@ Usage: dayofyear(date) returns the day of the year for date, in the range 1 to If an argument of type `TIME` is given, the function will use the current date. The function `day_of_year`_ is also provided as an alias. 
-Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -1872,14 +1872,6 @@ Example:: | 239 | +---------------------------------+ - os> SELECT DAYOFYEAR(DATETIME('2020-08-26 00:00:00')) - fetched rows / total rows = 1/1 - +----------------------------------------------+ - | DAYOFYEAR(DATETIME('2020-08-26 00:00:00')) | - |----------------------------------------------| - | 239 | - +----------------------------------------------+ - os> SELECT DAYOFYEAR(TIMESTAMP('2020-08-26 00:00:00')) fetched rows / total rows = 1/1 +-----------------------------------------------+ @@ -1898,7 +1890,7 @@ Description If an argument of type `TIME` is given, the function will use the current date. This function is an alias to the `dayofyear`_ function -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -1912,14 +1904,6 @@ Example:: | 239 | +-----------------------------------+ - os> SELECT DAY_OF_YEAR(DATETIME('2020-08-26 00:00:00')) - fetched rows / total rows = 1/1 - +------------------------------------------------+ - | DAY_OF_YEAR(DATETIME('2020-08-26 00:00:00')) | - |------------------------------------------------| - | 239 | - +------------------------------------------------+ - os> SELECT DAY_OF_YEAR(TIMESTAMP('2020-08-26 00:00:00')) fetched rows / total rows = 1/1 +-------------------------------------------------+ @@ -2030,7 +2014,7 @@ FROM_UNIXTIME Description >>>>>>>>>>> -Usage: Returns a representation of the argument given as a datetime or character string value. Perform reverse conversion for `UNIX_TIMESTAMP`_ function. +Usage: Returns a representation of the argument given as a timestamp or character string value. Perform reverse conversion for `UNIX_TIMESTAMP`_ function. If second argument is provided, it is used to format the result in the same way as the format string used for the `DATE_FORMAT`_ function. 
If timestamp is outside of range 1970-01-01 00:00:00 - 3001-01-18 23:59:59.999999 (0 to 32536771199.999999 epoch time), function returns NULL. @@ -2038,7 +2022,7 @@ Argument type: DOUBLE, STRING Return type map: -DOUBLE -> DATETIME +DOUBLE -> TIMESTAMP DOUBLE, STRING -> STRING @@ -2070,7 +2054,7 @@ Description Usage: Returns a string value containing string format specifiers based on the input arguments. Argument type: TYPE, STRING -TYPE must be one of the following tokens: [DATE, TIME, DATETIME, TIMESTAMP]. +TYPE must be one of the following tokens: [DATE, TIME, TIMESTAMP]. STRING must be one of the following tokens: ["USA", "JIS", "ISO", "EUR", "INTERNAL"] (" can be replaced by '). Examples:: @@ -2093,7 +2077,7 @@ Description Usage: hour(time) extracts the hour value for time. Different from the time of day value, the time value has a large range and can be greater than 23, so the return value of hour(time) can be also greater than 23. The function `hour_of_day` is also provided as an alias. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -2113,7 +2097,7 @@ LAST_DAY Usage: Returns the last day of the month as a DATE for a valid argument. -Argument type: DATE/DATETIME/STRING/TIMESTAMP/TIME +Argument type: DATE/STRING/TIMESTAMP/TIME Return type: DATE @@ -2238,9 +2222,9 @@ MICROSECOND Description >>>>>>>>>>> -Usage: microsecond(expr) returns the microseconds from the time or datetime expression expr as a number in the range from 0 to 999999. +Usage: microsecond(expr) returns the microseconds from the time or timestamp expression expr as a number in the range from 0 to 999999. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -2264,7 +2248,7 @@ Description Usage: minute(time) returns the minute for time, in the range 0 to 59. The `minute_of_hour` function is provided as an alias. 
-Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -2287,7 +2271,7 @@ Description Usage: minute_of_day(time) returns the minute value for time within a 24 hour day, in the range 0 to 1439. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -2312,7 +2296,7 @@ Usage: month(date) returns the month for date, in the range 1 to 12 for January If an argument of type `TIME` is given, the function will use the current date. The function `month_of_year` is also provided as an alias. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -2344,7 +2328,7 @@ Description Usage: monthname(date) returns the full name of the month for date. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: STRING @@ -2368,9 +2352,9 @@ Description Returns the current date and time as a value in 'YYYY-MM-DD hh:mm:ss' format. The value is expressed in the cluster time zone. `NOW()` returns a constant time that indicates the time at which the statement began to execute. This differs from the behavior for `SYSDATE() <#sysdate>`_, which returns the exact time at which it executes. -Return type: DATETIME +Return type: TIMESTAMP -Specification: NOW() -> DATETIME +Specification: NOW() -> TIMESTAMP Example:: @@ -2437,7 +2421,7 @@ Description Usage: quarter(date) returns the quarter of the year for date, in the range 1 to 4. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -2503,7 +2487,7 @@ Description Usage: second(time) returns the second for time, in the range 0 to 59. 
The function `second_of_minute` is provided as an alias -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -2532,14 +2516,14 @@ STR_TO_DATE Description >>>>>>>>>>> -Usage: str_to_date(string, string) is used to extract a DATETIME from the first argument string using the formats specified in the second argument string. -The input argument must have enough information to be parsed as a DATE, DATETIME, or TIME. +Usage: str_to_date(string, string) is used to extract a TIMESTAMP from the first argument string using the formats specified in the second argument string. +The input argument must have enough information to be parsed as a DATE, TIMESTAMP, or TIME. Acceptable string format specifiers are the same as those used in the `DATE_FORMAT`_ function. -It returns NULL when a statement cannot be parsed due to an invalid pair of arguments, and when 0 is provided for any DATE field. Otherwise, it will return a DATETIME with the parsed values (as well as default values for any field that was not parsed). +It returns NULL when a statement cannot be parsed due to an invalid pair of arguments, and when 0 is provided for any DATE field. Otherwise, it will return a TIMESTAMP with the parsed values (as well as default values for any field that was not parsed). Argument type: STRING, STRING -Return type: DATETIME +Return type: TIMESTAMP Example:: @@ -2561,15 +2545,15 @@ Description Usage: subdate(date, INTERVAL expr unit) / subdate(date, days) subtracts the time interval expr from date; subdate(date, days) subtracts the second argument as integer number of days from date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. 
-Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL/LONG +Argument type: DATE/TIMESTAMP/TIME, INTERVAL/LONG Return type map: -(DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME +(DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP (DATE, LONG) -> DATE -(DATETIME/TIMESTAMP/TIME, LONG) -> DATETIME +(TIMESTAMP/TIME, LONG) -> TIMESTAMP Synonyms: `DATE_SUB`_ when invoked with the INTERVAL form of the second argument. @@ -2594,13 +2578,13 @@ Description Usage: subtime(expr1, expr2) subtracts expr2 from expr1 and returns the result. If argument is TIME, today's date is used; if argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type map: -(DATE/DATETIME/TIMESTAMP, DATE/DATETIME/TIMESTAMP/TIME) -> DATETIME +(DATE/TIMESTAMP, DATE/TIMESTAMP/TIME) -> TIMESTAMP -(TIME, DATE/DATETIME/TIMESTAMP/TIME) -> TIME +(TIME, DATE/TIMESTAMP/TIME) -> TIME Antonyms: `ADDTIME`_ @@ -2638,7 +2622,7 @@ Example:: | 10:14:48 | +---------------------------+ - os> SELECT SUBTIME(TIMESTAMP('2007-03-01 10:20:30'), DATETIME('2002-03-04 20:40:50')) AS `'2007-03-01 10:20:30' - '20:40:50'` + os> SELECT SUBTIME(TIMESTAMP('2007-03-01 10:20:30'), TIMESTAMP('2002-03-04 20:40:50')) AS `'2007-03-01 10:20:30' - '20:40:50'` fetched rows / total rows = 1/1 +--------------------------------------+ | '2007-03-01 10:20:30' - '20:40:50' | @@ -2659,9 +2643,9 @@ If the argument is given, it specifies a fractional seconds precision from 0 to Optional argument type: INTEGER -Return type: DATETIME +Return type: TIMESTAMP -Specification: SYSDATE([INTEGER]) -> DATETIME +Specification: SYSDATE([INTEGER]) -> TIMESTAMP Example:: @@ -2680,9 +2664,9 @@ TIME Description >>>>>>>>>>> -Usage: time(expr) constructs a time type with the input string expr as a time. If the argument is of date/datetime/time/timestamp, it extracts the time value part from the expression. 
+Usage: time(expr) constructs a time type with the input string expr as a time. If the argument is of date/time/timestamp, it extracts the time value part from the expression. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: TIME @@ -2706,7 +2690,7 @@ Usage: time_format(time, format) formats the time argument using the specifiers This supports a subset of the time format specifiers available for the `date_format`_ function. Using date format specifiers supported by `date_format`_ will return 0 or null. Acceptable format specifiers are listed in the table below. -If an argument of type DATE is passed in, it is treated as a DATETIME at midnight (i.e., 00:00:00). +If an argument of type DATE is passed in, it is treated as a TIMESTAMP at midnight (i.e., 00:00:00). .. list-table:: The following table describes the available specifier arguments. :widths: 20 80 @@ -2736,7 +2720,7 @@ If an argument of type DATE is passed in, it is treated as a DATETIME at midnigh - Time, 24-hour (hh:mm:ss) -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP, STRING +Argument type: STRING/DATE/TIME/TIMESTAMP, STRING Return type: STRING @@ -2759,7 +2743,7 @@ Description Usage: time_to_sec(time) returns the time argument, converted to seconds. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: LONG @@ -2804,15 +2788,15 @@ Description >>>>>>>>>>> Usage: timestamp(expr) constructs a timestamp type with the input string `expr` as an timestamp. If the argument is not a string, it casts `expr` to timestamp type with default timezone UTC. If argument is a time, it applies today's date before cast. -With two arguments `timestamp(expr1, expr2)` adds the time expression `expr2` to the date or datetime expression `expr1` and returns the result as a timestamp value. 
+With two arguments `timestamp(expr1, expr2)` adds the time expression `expr2` to the date or timestamp expression `expr1` and returns the result as a timestamp value. -Argument type: STRING/DATE/TIME/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type map: -(STRING/DATE/TIME/DATETIME/TIMESTAMP) -> TIMESTAMP +(STRING/DATE/TIME/TIMESTAMP) -> TIMESTAMP -(STRING/DATE/TIME/DATETIME/TIMESTAMP, STRING/DATE/TIME/DATETIME/TIMESTAMP) -> TIMESTAMP +(STRING/DATE/TIME/TIMESTAMP, STRING/DATE/TIME/TIMESTAMP) -> TIMESTAMP Example:: @@ -2831,11 +2815,11 @@ TIMESTAMPADD Description >>>>>>>>>>> -Usage: Returns a DATETIME value based on a passed in DATE/DATETIME/TIME/TIMESTAMP/STRING argument and an INTERVAL and INTEGER argument which determine the amount of time to be added. -If the third argument is a STRING, it must be formatted as a valid DATETIME. If only a TIME is provided, a DATETIME is still returned with the DATE portion filled in using the current date. -If the third argument is a DATE, it will be automatically converted to a DATETIME. +Usage: Returns a TIMESTAMP value based on a passed in DATE/TIME/TIMESTAMP/STRING argument and an INTERVAL and INTEGER argument which determine the amount of time to be added. +If the third argument is a STRING, it must be formatted as a valid TIMESTAMP. If only a TIME is provided, a TIMESTAMP is still returned with the DATE portion filled in using the current date. +If the third argument is a DATE, it will be automatically converted to a TIMESTAMP. -Argument type: INTERVAL, INTEGER, DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: INTERVAL, INTEGER, DATE/TIME/TIMESTAMP/STRING INTERVAL must be one of the following tokens: [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] Examples:: @@ -2856,11 +2840,11 @@ Description >>>>>>>>>>> Usage: TIMESTAMPDIFF(interval, start, end) returns the difference between the start and end date/times in interval units. 
-If a TIME is provided as an argument, it will be converted to a DATETIME with the DATE portion filled in using the current date. -Arguments will be automatically converted to a DATETIME/TIME/TIMESTAMP when appropriate. -Any argument that is a STRING must be formatted as a valid DATETIME. +If a TIME is provided as an argument, it will be converted to a TIMESTAMP with the DATE portion filled in using the current date. +Arguments will be automatically converted to a TIME/TIMESTAMP when appropriate. +Any argument that is a STRING must be formatted as a valid TIMESTAMP. -Argument type: INTERVAL, DATE/DATETIME/TIME/TIMESTAMP/STRING, DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: INTERVAL, DATE/TIME/TIMESTAMP/STRING, DATE/TIME/TIMESTAMP/STRING INTERVAL must be one of the following tokens: [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] Examples:: @@ -2882,7 +2866,7 @@ Description Usage: to_days(date) returns the day number (the number of days since year 0) of the given date. Returns NULL if date is invalid. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: LONG @@ -2906,7 +2890,7 @@ Description Usage: to_seconds(date) returns the number of seconds since the year 0 of the given value. Returns NULL if value is invalid. An argument of a LONG type can be used. It must be formatted as YMMDD, YYMMDD, YYYMMDD or YYYYMMDD. Note that a LONG type argument cannot have leading 0s as it will be parsed using an octal numbering system. -Argument type: STRING/LONG/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/LONG/DATE/TIME/TIMESTAMP Return type: LONG @@ -2928,11 +2912,11 @@ Description >>>>>>>>>>> Usage: Converts given argument to Unix time (seconds since January 1st, 1970 at 00:00:00 UTC). If no argument given, it returns current Unix time. -The date argument may be a DATE, DATETIME, or TIMESTAMP string, or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. 
If the argument includes a time part, it may optionally include a fractional seconds part. +The date argument may be a DATE or TIMESTAMP string, or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. If the argument includes a time part, it may optionally include a fractional seconds part. If argument is in invalid format or outside of range 1970-01-01 00:00:00 - 3001-01-18 23:59:59.999999 (0 to 32536771199.999999 epoch time), function returns NULL. You can use `FROM_UNIXTIME`_ to do reverse conversion. -Argument type: /DOUBLE/DATE/DATETIME/TIMESTAMP +Argument type: DOUBLE/DATE/TIMESTAMP Return type: DOUBLE @@ -3009,9 +2993,9 @@ Description Returns the current UTC timestamp as a value in 'YYYY-MM-DD hh:mm:ss'. -Return type: DATETIME +Return type: TIMESTAMP -Specification: UTC_TIMESTAMP() -> DATETIME +Specification: UTC_TIMESTAMP() -> TIMESTAMP Example:: @@ -3075,7 +3059,7 @@ The functions `weekofyear` and `week_of_year` is also provided as an alias. - 1-53 - with a Monday in this year -Argument type: DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: DATE/TIME/TIMESTAMP/STRING Return type: INTEGER @@ -3100,7 +3084,7 @@ Usage: weekday(date) returns the weekday index for date (0 = Monday, 1 = Tuesday It is similar to the `dayofweek`_ function, but returns different indexes for each day. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -3124,7 +3108,7 @@ Description The week_of_year function is a synonym for the `week`_ function. If an argument of type `TIME` is given, the function will use the current date. -Argument type: DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: DATE/TIME/TIMESTAMP/STRING Return type: INTEGER @@ -3148,7 +3132,7 @@ Description The weekofyear function is a synonym for the `week`_ function. If an argument of type `TIME` is given, the function will use the current date.
-Argument type: DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: DATE/TIME/TIMESTAMP/STRING Return type: INTEGER @@ -3171,7 +3155,7 @@ Description Usage: year(date) returns the year for date, in the range 1000 to 9999, or 0 for the “zero” date. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -3194,7 +3178,7 @@ Description Usage: yearweek(date) returns the year and week for date as an integer. It accepts and optional mode arguments aligned with those available for the `WEEK`_ function. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -4514,6 +4498,6 @@ Example:: +----------------+---------------+-----------------+------------------+ | typeof(date) | typeof(int) | typeof(now()) | typeof(column) | |----------------+---------------+-----------------+------------------| - | DATE | INTEGER | DATETIME | OBJECT | + | DATE | INTEGER | TIMESTAMP | OBJECT | +----------------+---------------+-----------------+------------------+ diff --git a/docs/user/general/datatypes.rst b/docs/user/general/datatypes.rst index a265ffd4c9..c423bd7b10 100644 --- a/docs/user/general/datatypes.rst +++ b/docs/user/general/datatypes.rst @@ -40,8 +40,6 @@ The OpenSearch SQL Engine support the following data types. +---------------------+ | timestamp | +---------------------+ -| datetime | -+---------------------+ | date | +---------------------+ | date_nanos | @@ -128,53 +126,51 @@ Type Conversion Matrix The following matrix illustrates the conversions allowed by our query engine for all the built-in data types as well as types provided by OpenSearch storage engine. 
-+--------------+------------------------------------------------+---------+------------------------------+-----------------------------------------------+--------------------------+---------------------+ -| Data Types | Numeric Type Family | BOOLEAN | String Type Family | Datetime Type Family | OpenSearch Type Family | Complex Type Family | -| +------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| | BYTE | SHORT | INTEGER | LONG | FLOAT | DOUBLE | BOOLEAN | TEXT_KEYWORD | TEXT | STRING | TIMESTAMP | DATE | TIME | DATETIME | INTERVAL | GEO_POINT | IP | BINARY | STRUCT | ARRAY | -+==============+======+=======+=========+======+=======+========+=========+==============+======+========+===========+======+======+==========+==========+===========+=====+========+===========+=========+ -| UNDEFINED | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| BYTE | N/A | IE | IE | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| SHORT | E | N/A | IE | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| INTEGER | E | E | N/A | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | X | 
-+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| LONG | E | E | E | N/A | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| FLOAT | E | E | E | E | N/A | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| DOUBLE | E | E | E | E | E | N/A | X | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| BOOLEAN | E | E | E | E | E | E | N/A | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| TEXT_KEYWORD | | | | | | | | N/A | | IE | | | | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| TEXT | | | | | | | | | N/A | IE | | | | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| STRING | E | E | E | E | E | E | 
IE | X | X | N/A | IE | IE | IE | IE | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| TIMESTAMP | X | X | X | X | X | X | X | X | X | E | N/A | IE | IE | IE | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| DATE | X | X | X | X | X | X | X | X | X | E | E | N/A | IE | E | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| TIME | X | X | X | X | X | X | X | X | X | E | E | E | N/A | E | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| DATETIME | X | X | X | X | X | X | X | X | X | E | E | E | E | N/A | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| INTERVAL | X | X | X | X | X | X | X | X | X | E | X | X | X | X | N/A | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| GEO_POINT | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | N/A | X | X | X | X | 
-+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| IP | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | N/A | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| BINARY | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | N/A | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| STRUCT | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | X | N/A | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| ARRAY | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | X | X | N/A | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ ++--------------+------------------------------------------------+---------+------------------------------+------------------------------------+--------------------------+---------------------+ +| Data Types | Numeric Type Family | BOOLEAN | String Type Family | Datetime Type Family | OpenSearch Type Family | Complex Type Family | +| +------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| | BYTE | SHORT | INTEGER | LONG | FLOAT | DOUBLE | BOOLEAN | TEXT_KEYWORD | TEXT | STRING | 
TIMESTAMP | DATE | TIME | INTERVAL | GEO_POINT | IP | BINARY | STRUCT | ARRAY | ++==============+======+=======+=========+======+=======+========+=========+==============+======+========+===========+======+======+==========+===========+=====+========+===========+=========+ +| UNDEFINED | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| BYTE | N/A | IE | IE | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| SHORT | E | N/A | IE | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| INTEGER | E | E | N/A | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| LONG | E | E | E | N/A | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| FLOAT | E | E | E | E | N/A | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | 
++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| DOUBLE | E | E | E | E | E | N/A | X | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+--------------+-----------+---------+ +| BOOLEAN | E | E | E | E | E | E | N/A | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| TEXT_KEYWORD | | | | | | | | N/A | | IE | | | | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| TEXT | | | | | | | | | N/A | IE | | | | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| STRING | E | E | E | E | E | E | IE | X | X | N/A | IE | IE | IE | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| TIMESTAMP | X | X | X | X | X | X | X | X | X | E | N/A | IE | IE | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| DATE | X | X | X | X | X | X | X | X | X | E | E | N/A | IE | X | X | X | X | X | X | 
++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| TIME | X | X | X | X | X | X | X | X | X | E | E | E | N/A | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| INTERVAL | X | X | X | X | X | X | X | X | X | E | X | X | X | N/A | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| GEO_POINT | X | X | X | X | X | X | X | X | X | | X | X | X | X | N/A | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| IP | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | N/A | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| BINARY | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | N/A | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| STRUCT | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | N/A | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| ARRAY | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | X | N/A | 
++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ Note that: @@ -236,7 +232,7 @@ Numeric values ranged from -2147483648 to +2147483647 are recognized as integer Date and Time Data Types ======================== -The datetime types supported by the SQL plugin are ``DATE``, ``TIME``, ``DATETIME``, ``TIMESTAMP``, and ``INTERVAL``, with date and time being used to represent temporal values. By default, the OpenSearch DSL uses ``date`` type as the only date and time related type as it contains all information about an absolute time point. To integrate with SQL language each of the types other than timestamp hold part of the temporal or timezone information. This information can be used to explicitly clarify the date and time types reflected in the datetime functions (see `Functions `_ for details), where some functions might have restrictions in the input argument type. +The datetime types supported by the SQL plugin are ``DATE``, ``TIME``, ``TIMESTAMP``, and ``INTERVAL``, with date and time being used to represent temporal values. By default, the OpenSearch DSL uses ``date`` type as the only date and time related type as it contains all information about an absolute time point. To integrate with SQL language each of the types other than timestamp hold part of the temporal or timezone information. This information can be used to explicitly clarify the date and time types reflected in the datetime functions (see `Functions `_ for details), where some functions might have restrictions in the input argument type. Date ---- @@ -262,19 +258,6 @@ Time represents the time on the clock or watch with no regard for which timezone +------+-----------------------+----------------------------------------------+ -Datetime --------- - -Datetime type is the combination of date and time. 
The conversion rule of date or time to datetime is described in `Conversion between date and time types`_. Datetime type does not contain timezone information. For an absolute time point that contains both date time and timezone information, see `Timestamp`_. - -+----------+----------------------------------+--------------------------------------------------------------------+ -| Type | Syntax | Range | -+==========+==================================+====================================================================+ -| Datetime | 'yyyy-MM-dd hh:mm:ss[.fraction]' | '0001-01-01 00:00:00.000000000' to '9999-12-31 23:59:59.999999999' | -+----------+----------------------------------+--------------------------------------------------------------------+ - - - Timestamp --------- @@ -304,16 +287,14 @@ The expr is any expression that can be iterated to a quantity value eventually, Conversion between date and time types -------------------------------------- -Basically the date and time types except interval can be converted to each other, but might suffer some alteration of the value or some information loss, for example extracting the time value from a datetime value, or convert a date value to a datetime value and so forth. Here lists the summary of the conversion rules that SQL plugin supports for each of the types: +Basically the date and time types except interval can be converted to each other, but might suffer some alteration of the value or some information loss, for example extracting the time value from a timestamp value, or convert a date value to a timestamp value and so forth. Here lists the summary of the conversion rules that SQL plugin supports for each of the types: Conversion from DATE >>>>>>>>>>>>>>>>>>>> - Since the date value does not have any time information, conversion to `Time`_ type is not useful, and will always return a zero time value '00:00:00'. 
-- Conversion from date to datetime has a data fill-up due to the lack of time information, and it attaches the time '00:00:00' to the original date by default and forms a datetime instance. For example, the result to covert date '2020-08-17' to datetime type is datetime '2020-08-17 00:00:00'. - -- Conversion to timestamp is to alternate both the time value and the timezone information, and it attaches the zero time value '00:00:00' and the session timezone (UTC by default) to the date. For example, the result to covert date '2020-08-17' to datetime type with session timezone UTC is datetime '2020-08-17 00:00:00' UTC. +- Conversion to timestamp is to alternate both the time value and the timezone information, and it attaches the zero time value '00:00:00' and the session timezone (UTC by default) to the date. For example, the result to convert date '2020-08-17' to timestamp type with session timezone UTC is timestamp '2020-08-17 00:00:00' UTC. Conversion from TIME >>>>>>>>>>>>>>>>>>>> @@ -322,20 +303,10 @@ Conversion from TIME - When time value is converted to any other datetime types, the date part of the new value is filled up with today's date, like with the `CURDATE` function. For example, a time value X converted to a timestamp would produce today's date at time X. -Conversion from DATETIME ->>>>>>>>>>>>>>>>>>>>>>>> - -- Conversion from datetime to date is to extract the date part from the datetime value. For example, the result to convert datetime '2020-08-17 14:09:00' to date is date '2020-08-08'. - -- Conversion to time is to extract the time part from the datetime value. For example, the result to convert datetime '2020-08-17 14:09:00' to time is time '14:09:00'. - -- Since the datetime type does not contain timezone information, the conversion to timestamp needs to fill up the timezone part with the session timezone. For example, the result to convert datetime '2020-08-17 14:09:00' with system timezone of UTC, to timestamp is timestamp '2020-08-17 14:09:00' UTC.
- - Conversion from TIMESTAMP >>>>>>>>>>>>>>>>>>>>>>>>> -- Conversion from timestamp is much more straightforward. To convert it to date is to extract the date value, and conversion to time is to extract the time value. Conversion to datetime, it will extracts the datetime value and leave the timezone information over. For example, the result to convert datetime '2020-08-17 14:09:00' UTC to date is date '2020-08-17', to time is '14:09:00' and to datetime is datetime '2020-08-17 14:09:00'. +- Conversion from timestamp is much more straightforward. To convert it to date is to extract the date value, and conversion to time is to extract the time value. For example, the result to convert timestamp '2020-08-17 14:09:00' UTC to date is date '2020-08-17', to time is '14:09:00'. Conversion from string to date and time types --------------------------------------------- diff --git a/docs/user/ppl/functions/datetime.rst b/docs/user/ppl/functions/datetime.rst index f7c4091753..9e75e41136 100644 --- a/docs/user/ppl/functions/datetime.rst +++ b/docs/user/ppl/functions/datetime.rst @@ -17,15 +17,15 @@ Description Usage: adddate(date, INTERVAL expr unit) / adddate(date, days) adds the interval of second argument to date; adddate(date, days) adds the second argument as integer number of days to date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL/LONG +Argument type: DATE/TIMESTAMP/TIME, INTERVAL/LONG Return type map: -(DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME +(DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP (DATE, LONG) -> DATE -(DATETIME/TIMESTAMP/TIME, LONG) -> DATETIME +(TIMESTAMP/TIME, LONG) -> TIMESTAMP Synonyms: `DATE_ADD`_ when invoked with the INTERVAL form of the second argument. @@ -51,13 +51,13 @@ Description Usage: addtime(expr1, expr2) adds expr2 to expr1 and returns the result. 
If argument is TIME, today's date is used; if argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type map: -(DATE/DATETIME/TIMESTAMP, DATE/DATETIME/TIMESTAMP/TIME) -> DATETIME +(DATE/TIMESTAMP, DATE/TIMESTAMP/TIME) -> TIMESTAMP -(TIME, DATE/DATETIME/TIMESTAMP/TIME) -> TIME +(TIME, DATE/TIMESTAMP/TIME) -> TIME Antonyms: `SUBTIME`_ @@ -95,7 +95,7 @@ Example:: | 10:26:12 | +---------------------------+ - os> source=people | eval `'2007-02-28 10:20:30' + '20:40:50'` = ADDTIME(TIMESTAMP('2007-02-28 10:20:30'), DATETIME('2002-03-04 20:40:50')) | fields `'2007-02-28 10:20:30' + '20:40:50'` + os> source=people | eval `'2007-02-28 10:20:30' + '20:40:50'` = ADDTIME(TIMESTAMP('2007-02-28 10:20:30'), TIMESTAMP('2002-03-04 20:40:50')) | fields `'2007-02-28 10:20:30' + '20:40:50'` fetched rows / total rows = 1/1 +--------------------------------------+ | '2007-02-28 10:20:30' + '20:40:50' | @@ -110,13 +110,13 @@ CONVERT_TZ Description >>>>>>>>>>> -Usage: convert_tz(datetime, from_timezone, to_timezone) constructs a local datetime converted from the from_timezone to the to_timezone. CONVERT_TZ returns null when any of the three function arguments are invalid, i.e. datetime is not in the format yyyy-MM-dd HH:mm:ss or the timeszone is not in (+/-)HH:mm. It also is invalid for invalid dates, such as February 30th and invalid timezones, which are ones outside of -13:59 and +14:00. +Usage: convert_tz(timestamp, from_timezone, to_timezone) constructs a local timestamp converted from the from_timezone to the to_timezone. CONVERT_TZ returns null when any of the three function arguments are invalid, i.e. timestamp is not in the format yyyy-MM-dd HH:mm:ss or the timezone is not in (+/-)HH:mm. It also is invalid for invalid dates, such as February 30th and invalid timezones, which are ones outside of -13:59 and +14:00.
-Argument type: DATETIME, STRING, STRING +Argument type: TIMESTAMP, STRING, STRING -Return type: DATETIME +Return type: TIMESTAMP -Conversion from +00:00 timezone to +10:00 timezone. Returns the datetime argument converted from +00:00 to +10:00 +Conversion from +00:00 timezone to +10:00 timezone. Returns the timestamp argument converted from +00:00 to +10:00 Example:: os> source=people | eval `convert_tz('2008-05-15 12:00:00','+00:00','+10:00')` = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields `convert_tz('2008-05-15 12:00:00','+00:00','+10:00')` @@ -349,9 +349,9 @@ DATE Description >>>>>>>>>>> -Usage: date(expr) constructs a date type with the input string expr as a date. If the argument is of date/datetime/timestamp, it extracts the date value part from the expression. +Usage: date(expr) constructs a date type with the input string expr as a date. If the argument is of date/timestamp, it extracts the date value part from the expression. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: DATE @@ -398,9 +398,9 @@ Description Usage: date_add(date, INTERVAL expr unit) adds the interval expr to date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL +Argument type: DATE/TIMESTAMP/TIME, INTERVAL -Return type: DATETIME +Return type: TIMESTAMP Synonyms: `ADDDATE`_ @@ -501,7 +501,7 @@ If an argument of type TIME is provided, the local date is used. 
* - x - x, for any smallcase/uppercase alphabet except [aydmshiHIMYDSEL] -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP, STRING +Argument type: STRING/DATE/TIME/TIMESTAMP, STRING Return type: STRING @@ -522,18 +522,18 @@ DATETIME Description >>>>>>>>>>> -Usage: DATETIME(datetime)/ DATETIME(date, to_timezone) Converts the datetime to a new timezone +Usage: DATETIME(timestamp)/ DATETIME(date, to_timezone) Converts the timestamp to a new timezone -Argument type: DATETIME/STRING +Argument type: TIMESTAMP/STRING Return type map: -(DATETIME, STRING) -> DATETIME +(TIMESTAMP, STRING) -> TIMESTAMP -(DATETIME) -> DATETIME +(TIMESTAMP) -> TIMESTAMP -Converting datetime with timezone to the second argument timezone. +Converting timestamp with timezone to the second argument timezone. Example:: os> source=people | eval `DATETIME('2004-02-28 23:00:00-10:00', '+10:00')` = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields `DATETIME('2004-02-28 23:00:00-10:00', '+10:00')` @@ -545,7 +545,7 @@ Example:: +---------------------------------------------------+ - The valid timezone range for convert_tz is (-13:59, +14:00) inclusive. Timezones outside of the range will result in null. +The valid timezone range for convert_tz is (-13:59, +14:00) inclusive. Timezones outside of the range will result in null. Example:: os> source=people | eval `DATETIME('2008-01-01 02:00:00', '-14:00')` = DATETIME('2008-01-01 02:00:00', '-14:00') | fields `DATETIME('2008-01-01 02:00:00', '-14:00')` fetched rows / total rows = 1/1 +---------------------------------------------+ | DATETIME('2008-01-01 02:00:00', '-14:00') | |---------------------------------------------| | null | +---------------------------------------------+ -The valid timezone range for convert_tz is (-13:59, +14:00) inclusive. Timezones outside of the range will result in null.
-Example:: - - os> source=people | eval `DATETIME('2008-02-30 02:00:00', '-00:00')` = DATETIME('2008-02-30 02:00:00', '-00:00') | fields `DATETIME('2008-02-30 02:00:00', '-00:00')` - fetched rows / total rows = 1/1 - +---------------------------------------------+ - | DATETIME('2008-02-30 02:00:00', '-00:00') | - |---------------------------------------------| - | null | - +---------------------------------------------+ - DATE_SUB -------- @@ -576,9 +565,9 @@ Description Usage: date_sub(date, INTERVAL expr unit) subtracts the interval expr from date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL +Argument type: DATE/TIMESTAMP/TIME, INTERVAL -Return type: DATETIME +Return type: TIMESTAMP Synonyms: `SUBDATE`_ @@ -600,7 +589,7 @@ DATEDIFF Usage: Calculates the difference of date parts of given values. If the first argument is time, today's date is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type: LONG @@ -623,7 +612,7 @@ Description Usage: day(date) extracts the day of the month for date, in the range 1 to 31. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -648,7 +637,7 @@ Description Usage: dayname(date) returns the name of the weekday for date, including Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: STRING @@ -671,7 +660,7 @@ Description Usage: dayofmonth(date) extracts the day of the month for date, in the range 1 to 31. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -696,7 +685,7 @@ Description Usage: day_of_month(date) extracts the day of the month for date, in the range 1 to 31. 
-Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -721,7 +710,7 @@ Description Usage: dayofweek(date) returns the weekday index for date (1 = Sunday, 2 = Monday, ..., 7 = Saturday). -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -746,7 +735,7 @@ Description Usage: day_of_week(date) returns the weekday index for date (1 = Sunday, 2 = Monday, ..., 7 = Saturday). -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -771,7 +760,7 @@ Description Usage: dayofyear(date) returns the day of the year for date, in the range 1 to 366. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -796,7 +785,7 @@ Description Usage: day_of_year(date) returns the day of the year for date, in the range 1 to 366. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -913,14 +902,14 @@ FROM_UNIXTIME Description >>>>>>>>>>> -Usage: Returns a representation of the argument given as a datetime or character string value. Perform reverse conversion for `UNIX_TIMESTAMP`_ function. +Usage: Returns a representation of the argument given as a timestamp or character string value. Perform reverse conversion for `UNIX_TIMESTAMP`_ function. If second argument is provided, it is used to format the result in the same way as the format string used for the `DATE_FORMAT`_ function. If timestamp is outside of range 1970-01-01 00:00:00 - 3001-01-18 23:59:59.999999 (0 to 32536771199.999999 epoch time), function returns NULL. Argument type: DOUBLE, STRING Return type map: -DOUBLE -> DATETIME +DOUBLE -> TIMESTAMP DOUBLE, STRING -> STRING @@ -951,7 +940,7 @@ Description Usage: Returns a string value containing string format specifiers based on the input arguments. 
-Argument type: TYPE, STRING, where TYPE must be one of the following tokens: [DATE, TIME, DATETIME, TIMESTAMP], and +Argument type: TYPE, STRING, where TYPE must be one of the following tokens: [DATE, TIME, TIMESTAMP], and STRING must be one of the following tokens: ["USA", "JIS", "ISO", "EUR", "INTERNAL"] (" can be replaced by '). Examples:: @@ -973,7 +962,7 @@ Description Usage: hour(time) extracts the hour value for time. Different from the time of day value, the time value has a large range and can be greater than 23, so the return value of hour(time) can be also greater than 23. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -998,7 +987,7 @@ Description Usage: hour_of_day(time) extracts the hour value for time. Different from the time of day value, the time value has a large range and can be greater than 23, so the return value of hour_of_day(time) can be also greater than 23. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1020,7 +1009,7 @@ LAST_DAY Usage: Returns the last day of the month as a DATE for a valid argument. -Argument type: DATE/DATETIME/STRING/TIMESTAMP/TIME +Argument type: DATE/STRING/TIMESTAMP/TIME Return type: DATE @@ -1145,9 +1134,9 @@ MICROSECOND Description >>>>>>>>>>> -Usage: microsecond(expr) returns the microseconds from the time or datetime expression expr as a number in the range from 0 to 999999. +Usage: microsecond(expr) returns the microseconds from the time or timestamp expression expr as a number in the range from 0 to 999999. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1170,7 +1159,7 @@ Description Usage: minute(time) returns the minute for time, in the range 0 to 59. 
-Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1195,7 +1184,7 @@ Description Usage: minute(time) returns the amount of minutes in the day, in the range of 0 to 1439. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1218,7 +1207,7 @@ Description Usage: minute(time) returns the minute for time, in the range 0 to 59. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1243,7 +1232,7 @@ Description Usage: month(date) returns the month for date, in the range 1 to 12 for January to December. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -1268,7 +1257,7 @@ Description Usage: month_of_year(date) returns the month for date, in the range 1 to 12 for January to December. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -1293,7 +1282,7 @@ Description Usage: monthname(date) returns the full name of the month for date. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: STRING @@ -1317,9 +1306,9 @@ Description Returns the current date and time as a value in 'YYYY-MM-DD hh:mm:ss' format. The value is expressed in the cluster time zone. `NOW()` returns a constant time that indicates the time at which the statement began to execute. This differs from the behavior for `SYSDATE() <#sysdate>`_, which returns the exact time at which it executes. -Return type: DATETIME +Return type: TIMESTAMP -Specification: NOW() -> DATETIME +Specification: NOW() -> TIMESTAMP Example:: @@ -1386,7 +1375,7 @@ Description Usage: quarter(date) returns the quarter of the year for date, in the range 1 to 4. 
-Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -1435,7 +1424,7 @@ Description Usage: second(time) returns the second for time, in the range 0 to 59. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1460,7 +1449,7 @@ Description Usage: second_of_minute(time) returns the second for time, in the range 0 to 59. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1483,14 +1472,14 @@ STR_TO_DATE Description >>>>>>>>>>> -Usage: str_to_date(string, string) is used to extract a DATETIME from the first argument string using the formats specified in the second argument string. -The input argument must have enough information to be parsed as a DATE, DATETIME, or TIME. +Usage: str_to_date(string, string) is used to extract a TIMESTAMP from the first argument string using the formats specified in the second argument string. +The input argument must have enough information to be parsed as a DATE, TIMESTAMP, or TIME. Acceptable string format specifiers are the same as those used in the `DATE_FORMAT`_ function. -It returns NULL when a statement cannot be parsed due to an invalid pair of arguments, and when 0 is provided for any DATE field. Otherwise, it will return a DATETIME with the parsed values (as well as default values for any field that was not parsed). +It returns NULL when a statement cannot be parsed due to an invalid pair of arguments, and when 0 is provided for any DATE field. Otherwise, it will return a TIMESTAMP with the parsed values (as well as default values for any field that was not parsed). 
Argument type: STRING, STRING -Return type: DATETIME +Return type: TIMESTAMP Example:: @@ -1512,15 +1501,15 @@ Description Usage: subdate(date, INTERVAL expr unit) / subdate(date, days) subtracts the interval expr from date; subdate(date, days) subtracts the second argument as integer number of days from date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL/LONG +Argument type: DATE/TIMESTAMP/TIME, INTERVAL/LONG Return type map: -(DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME +(DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP (DATE, LONG) -> DATE -(DATETIME/TIMESTAMP/TIME, LONG) -> DATETIME +(TIMESTAMP/TIME, LONG) -> TIMESTAMP Synonyms: `DATE_SUB`_ when invoked with the INTERVAL form of the second argument. @@ -1545,13 +1534,13 @@ Description Usage: subtime(expr1, expr2) subtracts expr2 from expr1 and returns the result. If argument is TIME, today's date is used; if argument is DATE, time at midnight is used. 
-Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type map: -(DATE/DATETIME/TIMESTAMP, DATE/DATETIME/TIMESTAMP/TIME) -> DATETIME +(DATE/TIMESTAMP, DATE/TIMESTAMP/TIME) -> TIMESTAMP -(TIME, DATE/DATETIME/TIMESTAMP/TIME) -> TIME +(TIME, DATE/TIMESTAMP/TIME) -> TIME Antonyms: `ADDTIME`_ @@ -1589,7 +1578,7 @@ Example:: | 10:14:48 | +---------------------------+ - os> source=people | eval `'2007-03-01 10:20:30' - '20:40:50'` = SUBTIME(TIMESTAMP('2007-03-01 10:20:30'), DATETIME('2002-03-04 20:40:50')) | fields `'2007-03-01 10:20:30' - '20:40:50'` + os> source=people | eval `'2007-03-01 10:20:30' - '20:40:50'` = SUBTIME(TIMESTAMP('2007-03-01 10:20:30'), TIMESTAMP('2002-03-04 20:40:50')) | fields `'2007-03-01 10:20:30' - '20:40:50'` fetched rows / total rows = 1/1 +--------------------------------------+ | '2007-03-01 10:20:30' - '20:40:50' | @@ -1610,9 +1599,9 @@ If the argument is given, it specifies a fractional seconds precision from 0 to Optional argument type: INTEGER -Return type: DATETIME +Return type: TIMESTAMP -Specification: SYSDATE([INTEGER]) -> DATETIME +Specification: SYSDATE([INTEGER]) -> TIMESTAMP Example:: @@ -1631,9 +1620,9 @@ TIME Description >>>>>>>>>>> -Usage: time(expr) constructs a time type with the input string expr as a time. If the argument is of date/datetime/time/timestamp, it extracts the time value part from the expression. +Usage: time(expr) constructs a time type with the input string expr as a time. If the argument is of date/time/timestamp, it extracts the time value part from the expression. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: TIME @@ -1682,7 +1671,7 @@ Usage: time_format(time, format) formats the time argument using the specifiers This supports a subset of the time format specifiers available for the `date_format`_ function. 
Using date format specifiers supported by `date_format`_ will return 0 or null. Acceptable format specifiers are listed in the table below. -If an argument of type DATE is passed in, it is treated as a DATETIME at midnight (i.e., 00:00:00). +If an argument of type DATE is passed in, it is treated as a TIMESTAMP at midnight (i.e., 00:00:00). .. list-table:: The following table describes the available specifier arguments. :widths: 20 80 @@ -1712,7 +1701,7 @@ If an argument of type DATE is passed in, it is treated as a DATETIME at midnigh - Time, 24-hour (hh:mm:ss) -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP, STRING +Argument type: STRING/DATE/TIME/TIMESTAMP, STRING Return type: STRING @@ -1735,7 +1724,7 @@ Description Usage: time_to_sec(time) returns the time argument, converted to seconds. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: LONG @@ -1780,15 +1769,15 @@ Description >>>>>>>>>>> Usage: timestamp(expr) constructs a timestamp type with the input string `expr` as an timestamp. If the argument is not a string, it casts `expr` to timestamp type with default timezone UTC. If argument is a time, it applies today's date before cast. -With two arguments `timestamp(expr1, expr2)` adds the time expression `expr2` to the date or datetime expression `expr1` and returns the result as a timestamp value. +With two arguments `timestamp(expr1, expr2)` adds the time expression `expr2` to the date or timestamp expression `expr1` and returns the result as a timestamp value. 
-Argument type: STRING/DATE/TIME/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type map: -(STRING/DATE/TIME/DATETIME/TIMESTAMP) -> TIMESTAMP +(STRING/DATE/TIME/TIMESTAMP) -> TIMESTAMP -(STRING/DATE/TIME/DATETIME/TIMESTAMP, STRING/DATE/TIME/DATETIME/TIMESTAMP) -> TIMESTAMP +(STRING/DATE/TIME/TIMESTAMP, STRING/DATE/TIME/TIMESTAMP) -> TIMESTAMP Example:: @@ -1807,11 +1796,11 @@ TIMESTAMPADD Description >>>>>>>>>>> -Usage: Returns a DATETIME value based on a passed in DATE/DATETIME/TIME/TIMESTAMP/STRING argument and an INTERVAL and INTEGER argument which determine the amount of time to be added. -If the third argument is a STRING, it must be formatted as a valid DATETIME. If only a TIME is provided, a DATETIME is still returned with the DATE portion filled in using the current date. -If the third argument is a DATE, it will be automatically converted to a DATETIME. +Usage: Returns a TIMESTAMP value based on a passed in DATE/TIME/TIMESTAMP/STRING argument and an INTERVAL and INTEGER argument which determine the amount of time to be added. +If the third argument is a STRING, it must be formatted as a valid TIMESTAMP. If only a TIME is provided, a TIMESTAMP is still returned with the DATE portion filled in using the current date. +If the third argument is a DATE, it will be automatically converted to a TIMESTAMP. -Argument type: INTERVAL, INTEGER, DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: INTERVAL, INTEGER, DATE/TIME/TIMESTAMP/STRING INTERVAL must be one of the following tokens: [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] @@ -1833,11 +1822,11 @@ Description >>>>>>>>>>> Usage: TIMESTAMPDIFF(interval, start, end) returns the difference between the start and end date/times in interval units. -If a TIME is provided as an argument, it will be converted to a DATETIME with the DATE portion filled in using the current date. -Arguments will be automatically converted to a DATETIME/TIME/TIMESTAMP when appropriate. 
-Any argument that is a STRING must be formatted as a valid DATETIME. +If a TIME is provided as an argument, it will be converted to a TIMESTAMP with the DATE portion filled in using the current date. +Arguments will be automatically converted to a TIME/TIMESTAMP when appropriate. +Any argument that is a STRING must be formatted as a valid TIMESTAMP. -Argument type: INTERVAL, DATE/DATETIME/TIME/TIMESTAMP/STRING, DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: INTERVAL, DATE/TIME/TIMESTAMP/STRING, DATE/TIME/TIMESTAMP/STRING INTERVAL must be one of the following tokens: [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] @@ -1860,7 +1849,7 @@ Description Usage: to_days(date) returns the day number (the number of days since year 0) of the given date. Returns NULL if date is invalid. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: LONG @@ -1884,7 +1873,7 @@ Description Usage: to_seconds(date) returns the number of seconds since the year 0 of the given value. Returns NULL if value is invalid. An argument of a LONG type can be used. It must be formatted as YMMDD, YYMMDD, YYYMMDD or YYYYMMDD. Note that a LONG type argument cannot have leading 0s as it will be parsed using an octal numbering system. -Argument type: STRING/LONG/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/LONG/DATE/TIME/TIMESTAMP Return type: LONG @@ -1906,11 +1895,11 @@ Description >>>>>>>>>>> Usage: Converts given argument to Unix time (seconds since Epoch - very beginning of year 1970). If no argument given, it returns the current Unix time. -The date argument may be a DATE, DATETIME, or TIMESTAMP string, or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. +The date argument may be a DATE or TIMESTAMP string, or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. 
If the argument includes a time part, it may optionally include a fractional seconds part. If argument is in invalid format or outside of range 1970-01-01 00:00:00 - 3001-01-18 23:59:59.999999 (0 to 32536771199.999999 epoch time), function returns NULL. You can use `FROM_UNIXTIME`_ to do reverse conversion. -Argument type: /DOUBLE/DATE/DATETIME/TIMESTAMP +Argument type: /DOUBLE/DATE/TIMESTAMP Return type: DOUBLE @@ -1979,9 +1968,9 @@ Description Returns the current UTC timestamp as a value in 'YYYY-MM-DD hh:mm:ss'. -Return type: DATETIME +Return type: TIMESTAMP -Specification: UTC_TIMESTAMP() -> DATETIME +Specification: UTC_TIMESTAMP() -> TIMESTAMP Example:: @@ -2043,7 +2032,7 @@ Usage: week(date[, mode]) returns the week number for date. If the mode argument - 1-53 - with a Monday in this year -Argument type: DATE/DATETIME/TIMESTAMP/STRING +Argument type: DATE/TIMESTAMP/STRING Return type: INTEGER @@ -2070,7 +2059,7 @@ Usage: weekday(date) returns the weekday index for date (0 = Monday, 1 = Tuesday It is similar to the `dayofweek`_ function, but returns different indexes for each day. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -2134,7 +2123,7 @@ Usage: week_of_year(date[, mode]) returns the week number for date. If the mode - 1-53 - with a Monday in this year -Argument type: DATE/DATETIME/TIMESTAMP/STRING +Argument type: DATE/TIMESTAMP/STRING Return type: INTEGER @@ -2159,7 +2148,7 @@ Description Usage: year(date) returns the year for date, in the range 1000 to 9999, or 0 for the “zero” date. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -2182,7 +2171,7 @@ Description Usage: yearweek(date) returns the year and week for date as an integer. It accepts and optional mode arguments aligned with those available for the `WEEK`_ function. 
-Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER diff --git a/docs/user/ppl/functions/system.rst b/docs/user/ppl/functions/system.rst index fbe9860dce..cfe0414c49 100644 --- a/docs/user/ppl/functions/system.rst +++ b/docs/user/ppl/functions/system.rst @@ -27,5 +27,5 @@ Example:: +----------------+---------------+-----------------+------------------+ | typeof(date) | typeof(int) | typeof(now()) | typeof(column) | |----------------+---------------+-----------------+------------------| - | DATE | INTEGER | DATETIME | OBJECT | + | DATE | INTEGER | TIMESTAMP | OBJECT | +----------------+---------------+-----------------+------------------+ diff --git a/docs/user/ppl/general/datatypes.rst b/docs/user/ppl/general/datatypes.rst index cabc689526..18555dec3d 100644 --- a/docs/user/ppl/general/datatypes.rst +++ b/docs/user/ppl/general/datatypes.rst @@ -39,8 +39,6 @@ The PPL support the following data types. +---------------+ | timestamp | +---------------+ -| datetime | -+---------------+ | date | +---------------+ | time | @@ -114,7 +112,7 @@ Numeric values ranged from -2147483648 to +2147483647 are recognized as integer Date and Time Data Types ======================== -The date and time data types are the types that represent temporal values and PPL plugin supports types including DATE, TIME, DATETIME, TIMESTAMP and INTERVAL. By default, the OpenSearch DSL uses date type as the only date and time related type, which has contained all information about an absolute time point. To integrate with PPL language, each of the types other than timestamp is holding part of temporal or timezone information, and the usage to explicitly clarify the date and time types is reflected in the datetime functions (see `Functions `_ for details), where some functions might have restrictions in the input argument type. 
+The date and time data types are the types that represent temporal values and PPL plugin supports types including DATE, TIME, TIMESTAMP and INTERVAL. By default, the OpenSearch DSL uses date type as the only date and time related type, which has contained all information about an absolute time point. To integrate with PPL language, each of the types other than timestamp is holding part of temporal or timezone information, and the usage to explicitly clarify the date and time types is reflected in the datetime functions (see `Functions `_ for details), where some functions might have restrictions in the input argument type. Date @@ -141,19 +139,6 @@ Time represents the time on the clock or watch with no regard for which timezone +------+-----------------------+----------------------------------------+ -Datetime --------- - -Datetime type is the combination of date and time. The conversion rule of date or time to datetime is described in `Conversion between date and time types`_. Datetime type does not contain timezone information. For an absolute time point that contains both date time and timezone information, see `Timestamp`_. 
- -+----------+----------------------------------+--------------------------------------------------------------+ -| Type | Syntax | Range | -+==========+==================================+==============================================================+ -| Datetime | 'yyyy-MM-dd hh:mm:ss[.fraction]' | '0001-01-01 00:00:00.000000' to '9999-12-31 23:59:59.999999' | -+----------+----------------------------------+--------------------------------------------------------------+ - - - Timestamp --------- @@ -183,38 +168,26 @@ The expr is any expression that can be iterated to a quantity value eventually, Conversion between date and time types -------------------------------------- -Basically the date and time types except interval can be converted to each other, but might suffer some alteration of the value or some information loss, for example extracting the time value from a datetime value, or convert a date value to a datetime value and so forth. Here lists the summary of the conversion rules that PPL plugin supports for each of the types: +Basically the date and time types except interval can be converted to each other, but might suffer some alteration of the value or some information loss, for example extracting the time value from a timestamp value, or convert a date value to a timestamp value and so forth. Here lists the summary of the conversion rules that PPL plugin supports for each of the types: Conversion from DATE >>>>>>>>>>>>>>>>>>>> - Since the date value does not have any time information, conversion to `Time`_ type is not useful, and will always return a zero time value '00:00:00'. -- Conversion from date to datetime has a data fill-up due to the lack of time information, and it attaches the time '00:00:00' to the original date by default and forms a datetime instance. For example, the result to covert date '2020-08-17' to datetime type is datetime '2020-08-17 00:00:00'. 
- -- Conversion to timestamp is to alternate both the time value and the timezone information, and it attaches the zero time value '00:00:00' and the session timezone (UTC by default) to the date. For example, the result to covert date '2020-08-17' to datetime type with session timezone UTC is datetime '2020-08-17 00:00:00' UTC. +- Conversion to timestamp is to alternate both the time value and the timezone information, and it attaches the zero time value '00:00:00' and the session timezone (UTC by default) to the date. For example, the result to convert date '2020-08-17' to timestamp type with session timezone UTC is timestamp '2020-08-17 00:00:00' UTC. Conversion from TIME >>>>>>>>>>>>>>>>>>>> -- Time value cannot be converted to any other date and time types since it does not contain any date information, so it is not meaningful to give no date info to a date/datetime/timestamp instance. - - -Conversion from DATETIME ->>>>>>>>>>>>>>>>>>>>>>>> - -- Conversion from datetime to date is to extract the date part from the datetime value. For example, the result to convert datetime '2020-08-17 14:09:00' to date is date '2020-08-08'. - -- Conversion to time is to extract the time part from the datetime value. For example, the result to convert datetime '2020-08-17 14:09:00' to time is time '14:09:00'. - -- Since the datetime type does not contain timezone information, the conversion to timestamp needs to fill up the timezone part with the session timezone. For example, the result to convert datetime '2020-08-17 14:09:00' with system timezone of UTC, to timestamp is timestamp '2020-08-17 14:09:00' UTC. +- Time value cannot be converted to any other date and time types since it does not contain any date information, so it is not meaningful to give no date info to a date/timestamp instance. Conversion from TIMESTAMP >>>>>>>>>>>>>>>>>>>>>>>>> 
To convert it to date is to extract the date value, and conversion to time is to extract the time value. Conversion to datetime, it will extracts the datetime value and leave the timezone information over. For example, the result to convert datetime '2020-08-17 14:09:00' UTC to date is date '2020-08-17', to time is '14:09:00' and to datetime is datetime '2020-08-17 14:09:00'. +- Conversion from timestamp is much more straightforward. To convert it to date is to extract the date value, and conversion to time is to extract the time value. For example, the result to convert timestamp '2020-08-17 14:09:00' UTC to date is date '2020-08-17', to time is '14:09:00'. String Data Types diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java index 105669c7ca..a0749387d5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java @@ -30,7 +30,7 @@ public void inRangeZeroToPositive() throws IOException { "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); } @@ -42,7 +42,7 @@ public void inRangeZeroToZero() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); } @@ -54,7 +54,7 @@ public void inRangePositiveToPositive() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, 
schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); } @@ -66,7 +66,7 @@ public void inRangeNegativeToPositive() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); } @@ -78,7 +78,7 @@ public void inRangeNoTZChange() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); } @@ -90,7 +90,7 @@ public void inRangeTwentyFourHourChange() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); } @@ -102,7 +102,7 @@ public void inRangeFifteenMinuteTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); } @@ -114,7 +114,7 @@ public void nullFromFieldUnder() throws IOException { "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); 
verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -126,7 +126,7 @@ public void nullToFieldOver() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -137,7 +137,7 @@ public void nullFromGarbageInput1() throws IOException { String.format( "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','test') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -148,7 +148,7 @@ public void nullFromGarbageInput2() throws IOException { String.format( "source=%s | eval f = convert_tz('2021test','-12:00','+00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -160,7 +160,7 @@ public void nullDateTimeInvalidDateValueFebruary() throws IOException { "source=%s | eval f = convert_tz('2021-02-30 10:00:00','+00:00','+00:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -172,7 +172,7 @@ public void nullDateTimeInvalidDateValueApril() throws IOException { "source=%s | eval f = convert_tz('2021-04-31 10:00:00','+00:00','+00:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -184,7 +184,7 @@ public void 
nullDateTimeInvalidDateValueMonth() throws IOException { "source=%s | eval f = convert_tz('2021-13-03 10:00:00','+00:00','+00:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java index 6f6b5cc297..7cc083cbb6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java @@ -98,32 +98,6 @@ public static Iterable compareTwoTimes() { $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareTwoDateTimes() { - return Arrays.asList( - $$( - $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), - $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), - $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), - $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), - $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), - $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), - $("DATETIME('1984-12-15 22:15:07') >= 
DATETIME('1961-04-12 09:07:00')", "gte1", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), - $( - "DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", - "lte3", - false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimestamps() { return Arrays.asList( @@ -161,22 +135,6 @@ public static Iterable compareEqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), @@ -187,37 +145,6 @@ public static Iterable compareEqTimestampWithOtherTypes() { $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareEqDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') 
= TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateWithOtherTypes() { var today = LocalDate.now().toString(); @@ -227,10 +154,6 @@ public static Iterable compareEqDateWithOtherTypes() { $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), @@ -242,10 +165,6 @@ public static Iterable compareEqTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') = 
TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), @@ -261,22 +180,6 @@ public static Iterable compareNeqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", - "ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), @@ -287,37 +190,6 @@ public static Iterable compareNeqTimestampWithOtherTypes() { $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareNeqDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 10:20:30') 
!= DATE('1961-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateWithOtherTypes() { var today = LocalDate.now().toString(); @@ -327,10 +199,6 @@ public static Iterable compareNeqDateWithOtherTypes() { $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), @@ -342,10 +210,6 @@ public static Iterable compareNeqTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", 
"dt_t_f", false), $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), @@ -361,22 +225,6 @@ public static Iterable compareLtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", - "ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), @@ -387,37 +235,6 @@ public static Iterable compareLtTimestampWithOtherTypes() { $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareLtDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", 
false), - $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateWithOtherTypes() { return Arrays.asList( @@ -426,10 +243,6 @@ public static Iterable compareLtDateWithOtherTypes() { $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), @@ -441,10 +254,6 @@ public static Iterable compareLtTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", 
false), @@ -460,22 +269,6 @@ public static Iterable compareGtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), @@ -486,37 +279,6 @@ public static Iterable compareGtTimestampWithOtherTypes() { $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareGtDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), - $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - 
$("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateWithOtherTypes() { return Arrays.asList( @@ -525,10 +287,6 @@ public static Iterable compareGtDateWithOtherTypes() { $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), - $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), @@ -540,10 +298,6 @@ public static Iterable compareGtTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), @@ -559,22 +313,6 @@ public static Iterable compareLteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", - 
"ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), @@ -585,37 +323,6 @@ public static Iterable compareLteTimestampWithOtherTypes() { $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareLteDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), - $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), - $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => 
%3$s") public static Iterable compareLteDateWithOtherTypes() { return Arrays.asList( @@ -624,10 +331,6 @@ public static Iterable compareLteDateWithOtherTypes() { $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), @@ -639,10 +342,6 @@ public static Iterable compareLteTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), @@ -658,22 +357,6 @@ public static Iterable compareGteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 
09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), @@ -684,37 +367,6 @@ public static Iterable compareGteTimestampWithOtherTypes() { $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareGteDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateWithOtherTypes() { return Arrays.asList( @@ -723,10 +375,6 @@ public static Iterable compareGteDateWithOtherTypes() { $("TIMESTAMP('2077-04-12 
09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), - $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), @@ -738,10 +386,6 @@ public static Iterable compareGteTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java index 1df87a87b3..3ea6897087 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java @@ -67,25 +67,15 @@ public void testAddDateWithDays() throws IOException { + " f = adddate(timestamp('2020-09-16 17:30:00'), 1)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", 
null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = - executeQuery( - String.format( - "source=%s | eval " - + " f = adddate(DATETIME('2020-09-16 07:40:00'), 1)" - + " | fields f", - TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2020-09-17 07:40:00")); - result = executeQuery( String.format( "source=%s | eval " + " f = adddate(TIME('07:40:00'), 0)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(LocalDate.now() + " 07:40:00")); } @@ -98,17 +88,7 @@ public void testAddDateWithInterval() throws IOException { + " f = adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - - result = - executeQuery( - String.format( - "source=%s | eval " - + " f = adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", - TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); result = @@ -118,7 +98,7 @@ public void testAddDateWithInterval() throws IOException { + " f = adddate(date('2020-09-16'), interval 1 day) " + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 00:00:00")); result = @@ -128,7 +108,7 @@ public void testAddDateWithInterval() throws IOException { + " f = adddate(date('2020-09-16'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + 
verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16 01:00:00")); result = @@ -138,7 +118,7 @@ public void testAddDateWithInterval() throws IOException { + " f = adddate(TIME('07:40:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -155,7 +135,7 @@ public void testAddDateWithInterval() throws IOException { + " f = adddate(TIME('07:40:00'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -173,7 +153,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); result = @@ -182,7 +162,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); result = @@ -191,7 +171,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); result = @@ -200,7 +180,7 @@ public void testConvertTZ() throws IOException { "source=%s | 
eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); result = @@ -209,7 +189,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); result = @@ -218,7 +198,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); result = @@ -227,7 +207,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); result = @@ -236,7 +216,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = @@ -245,7 +225,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, 
schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -258,17 +238,7 @@ public void testDateAdd() throws IOException { + " f = date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - - result = - executeQuery( - String.format( - "source=%s | eval " - + " f = date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", - TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); result = @@ -278,7 +248,7 @@ public void testDateAdd() throws IOException { + " f = date_add(date('2020-09-16'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 00:00:00")); result = @@ -288,7 +258,7 @@ public void testDateAdd() throws IOException { + " f = date_add(date('2020-09-16'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16 01:00:00")); result = @@ -298,7 +268,7 @@ public void testDateAdd() throws IOException { + " f = date_add(TIME('07:40:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -315,7 +285,7 @@ public void testDateAdd() throws IOException { + " f = date_add(TIME('07:40:00'), interval 1 hour)" + " | fields f", 
TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -329,7 +299,7 @@ public void testDateAdd() throws IOException { String.format( "source=%s | eval " + " f = DATE_ADD(birthdate, INTERVAL 1 YEAR)" + " | fields f", TEST_INDEX_BANK)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifyDataRows( result, rows("2018-10-23 00:00:00"), @@ -349,7 +319,7 @@ public void testDateTime() throws IOException { "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-24 21:30:00")); result = @@ -357,7 +327,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 06:30:00")); result = @@ -365,7 +335,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 15:30:00")); result = @@ -373,7 +343,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); 
verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); result = @@ -381,7 +351,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2003-02-28 23:00:00-10:00', '+10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2003-03-01 19:00:00")); result = @@ -389,7 +359,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 19:30:00")); result = @@ -397,7 +367,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2007-12-31 06:00:00")); result = @@ -405,7 +375,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); result = @@ -413,7 +383,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); result = @@ -421,7 +391,7 @@ public void testDateTime() 
throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = @@ -429,7 +399,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = @@ -437,7 +407,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '-14:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -450,17 +420,7 @@ public void testDateSub() throws IOException { + " f = date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - - result = - executeQuery( - String.format( - "source=%s | eval " - + " f = date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", - TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); result = @@ -470,7 +430,7 @@ public void testDateSub() throws IOException { + " f = date_sub(date('2020-09-16'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, 
"timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 00:00:00")); result = @@ -480,7 +440,7 @@ public void testDateSub() throws IOException { + " f = date_sub(date('2020-09-16'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 23:00:00")); result = @@ -490,7 +450,7 @@ public void testDateSub() throws IOException { + " f = date_sub(TIME('07:40:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -507,7 +467,7 @@ public void testDateSub() throws IOException { + " f = date_sub(TIME('07:40:00'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -1051,7 +1011,7 @@ public void testSubDateDays() throws IOException { + " f = subdate(timestamp('2020-09-16 17:30:00'), 1)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); result = @@ -1067,7 +1027,7 @@ public void testSubDateDays() throws IOException { String.format( "source=%s | eval " + " f = subdate(TIME('07:40:00'), 0)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(LocalDate.now() + " 07:40:00")); } @@ -1080,17 +1040,7 @@ public void testSubDateInterval() throws IOException { + " f = subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); 
- verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - - result = - executeQuery( - String.format( - "source=%s | eval " - + " f = subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", - TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); result = @@ -1100,7 +1050,7 @@ public void testSubDateInterval() throws IOException { + " f = subdate(date('2020-09-16'), interval 1 day) " + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 00:00:00")); result = @@ -1110,7 +1060,7 @@ public void testSubDateInterval() throws IOException { + " f = subdate(date('2020-09-16'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 23:00:00")); result = @@ -1120,7 +1070,7 @@ public void testSubDateInterval() throws IOException { + " f = subdate(TIME('07:40:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -1137,7 +1087,7 @@ public void testSubDateInterval() throws IOException { + " f = subdate(TIME('07:40:00'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -1316,17 +1266,17 @@ public void testAddTime() throws IOException { + " DATE('2004-01-01')), `'2004-01-01' + '23:59:59'` 
=" + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')), `'10:20:30' + '00:05:42'` =" + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')), `'15:42:13' + '09:07:00'` =" - + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" + + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), TIMESTAMP('1961-04-12 09:07:00'))" + " | fields `'2008-12-12' + 0`, `'23:59:59' + 0`, `'2004-01-01' + '23:59:59'`," + " `'10:20:30' + '00:05:42'`, `'15:42:13' + '09:07:00'`", TEST_INDEX_DATE)); verifySchema( result, - schema("'2008-12-12' + 0", null, "datetime"), + schema("'2008-12-12' + 0", null, "timestamp"), schema("'23:59:59' + 0", null, "time"), - schema("'2004-01-01' + '23:59:59'", null, "datetime"), + schema("'2004-01-01' + '23:59:59'", null, "timestamp"), schema("'10:20:30' + '00:05:42'", null, "time"), - schema("'15:42:13' + '09:07:00'", null, "datetime")); + schema("'15:42:13' + '09:07:00'", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -1347,17 +1297,17 @@ public void testSubTime() throws IOException { + " DATE('2004-01-01')), `'2004-01-01' - '23:59:59'` =" + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')), `'10:20:30' - '00:05:42'` =" + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')), `'15:42:13' - '09:07:00'` =" - + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" + + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), TIMESTAMP('1961-04-12 09:07:00'))" + " | fields `'2008-12-12' - 0`, `'23:59:59' - 0`, `'2004-01-01' - '23:59:59'`," + " `'10:20:30' - '00:05:42'`, `'15:42:13' - '09:07:00'`", TEST_INDEX_DATE)); verifySchema( result, - schema("'2008-12-12' - 0", null, "datetime"), + schema("'2008-12-12' - 0", null, "timestamp"), schema("'23:59:59' - 0", null, "time"), - schema("'2004-01-01' - '23:59:59'", null, "datetime"), + schema("'2004-01-01' - '23:59:59'", null, "timestamp"), schema("'10:20:30' - '00:05:42'", null, "time"), - schema("'15:42:13' - '09:07:00'", null, "datetime")); + schema("'15:42:13' - '09:07:00'", 
null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -1378,8 +1328,8 @@ public void testFromUnixTime() throws IOException { TEST_INDEX_DATE)); verifySchema( result, - schema("f1", null, "datetime"), - schema("f2", null, "datetime"), + schema("f1", null, "timestamp"), + schema("f2", null, "timestamp"), schema("f3", null, "string")); verifySome( result.getJSONArray("datarows"), @@ -1427,6 +1377,7 @@ public void testPeriodDiff() throws IOException { verifySome(result.getJSONArray("datarows"), rows(11, -25)); } + @Test public void testDateDiff() throws IOException { var result = executeQuery( @@ -1435,7 +1386,7 @@ public void testDateDiff() throws IOException { + " 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')), `'2001-02-01' -" + " '2004-01-01'` = DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01" + " 00:00:00')), `'2004-01-01' - '2002-02-01'` = DATEDIFF(TIMESTAMP('2004-01-01" - + " 00:00:00'), DATETIME('2002-02-01 14:25:30')), `today - today` =" + + " 00:00:00'), TIMESTAMP('2002-02-01 14:25:30')), `today - today` =" + " DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) | fields `'2000-01-02' -" + " '2000-01-01'`, `'2001-02-01' - '2004-01-01'`, `'2004-01-01' -" + " '2002-02-01'`, `today - today`", @@ -1519,7 +1470,7 @@ public void testToSeconds() throws IOException { String.format( "source=%s | eval f1 = to_seconds(date('2008-10-07')) | " + "eval f2 = to_seconds('2020-09-16 07:40:00') | " - + "eval f3 = to_seconds(DATETIME('2020-09-16 07:40:00')) | fields f1, f2, f3", + + "eval f3 = to_seconds(TIMESTAMP('2020-09-16 07:40:00')) | fields f1, f2, f3", TEST_INDEX_DATE)); verifySchema( result, schema("f1", null, "long"), schema("f2", null, "long"), schema("f3", null, "long")); @@ -1533,7 +1484,7 @@ public void testStrToDate() throws IOException { String.format( "source=%s | eval f = str_to_date('01,5,2013', '%s') | fields f", TEST_INDEX_DATE, "%d,%m,%Y")); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, 
"timestamp")); verifySome(result.getJSONArray("datarows"), rows("2013-05-01 00:00:00")); } @@ -1544,7 +1495,7 @@ public void testTimeStampAdd() throws IOException { String.format( "source=%s | eval f = timestampadd(YEAR, 15, '2001-03-06 00:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2016-03-06 00:00:00")); } @@ -1556,7 +1507,7 @@ public void testTimestampDiff() throws IOException { "source=%s | eval f = timestampdiff(YEAR, '1997-01-01 00:00:00', '2001-03-06" + " 00:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(4)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java index dd86470a39..f9dc7d8027 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java @@ -30,7 +30,7 @@ public void inRangeZeroToStringTZ() throws IOException { "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-24 21:30:00")); } @@ -41,7 +41,7 @@ public void inRangeZeroToPositive() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 06:30:00")); } @@ -52,7 +52,7 @@ public void 
inRangeNegativeToPositive() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 15:30:00")); } @@ -63,7 +63,7 @@ public void inRangeTwentyHourOffset() throws IOException { String.format( "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); } @@ -74,7 +74,7 @@ public void inRangeYearChange() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2007-12-31 06:00:00")); } @@ -85,7 +85,7 @@ public void inRangeZeroToMax() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 19:30:00")); } @@ -96,7 +96,7 @@ public void inRangeNoToTZ() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); } @@ -107,7 +107,7 @@ public void inRangeNoTZ() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", 
TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); } @@ -118,7 +118,7 @@ public void nullField3Over() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -129,7 +129,7 @@ public void nullField2Under() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -140,7 +140,7 @@ public void nullTField3Over() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '+15:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -151,7 +151,7 @@ public void nullDateTimeInvalidDateValueFebruary() throws IOException { String.format( "source=%s | eval f = DATETIME('2021-02-30 10:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -162,7 +162,7 @@ public void nullDateTimeInvalidDateValueApril() throws IOException { String.format( "source=%s | eval f = DATETIME('2021-04-31 10:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); 
verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -173,7 +173,7 @@ public void nullDateTimeInvalidDateValueMonth() throws IOException { String.format( "source=%s | eval f = DATETIME('2021-13-03 10:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java index 1c23935f81..c1356ce838 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java @@ -45,11 +45,10 @@ public void typeof_sql_types() throws IOException { "source=%s | eval " + "`timestamp` = typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," + "`time` = typeof(CAST('09:07:00' AS TIME))," - + "`date` = typeof(CAST('1961-04-12' AS DATE))," - + "`datetime` = typeof(DATETIME('1961-04-12 09:07:00'))" - + " | fields `timestamp`, `time`, `date`, `datetime`", + + "`date` = typeof(CAST('1961-04-12' AS DATE))" + + " | fields `timestamp`, `time`, `date`", TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); + verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE")); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java index 339cd56370..3f71499f97 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java @@ -287,13 +287,14 @@ public void testPushDownAggregationOnNullDateTimeValuesReturnsNull() throws IOEx var response = executeQuery( String.format( - "SELECT " + "max(datetime(NULL)), min(datetime(NULL)), avg(datetime(NULL)) from 
%s", + "SELECT " + + "max(timestamp(NULL)), min(timestamp(NULL)), avg(timestamp(NULL)) from %s", TEST_INDEX_CALCS)); verifySchema( response, - schema("max(datetime(NULL))", null, "datetime"), - schema("min(datetime(NULL))", null, "datetime"), - schema("avg(datetime(NULL))", null, "datetime")); + schema("max(timestamp(NULL))", null, "timestamp"), + schema("min(timestamp(NULL))", null, "timestamp"), + schema("avg(timestamp(NULL))", null, "timestamp")); verifyDataRows(response, rows(null, null, null)); } @@ -480,8 +481,8 @@ public void testMinDateTimePushedDown() throws IOException { var response = executeQuery( String.format( - "SELECT min(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); - verifySchema(response, schema("min(datetime(CAST(time0 AS STRING)))", null, "datetime")); + "SELECT min(timestamp(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(timestamp(CAST(time0 AS STRING)))", null, "timestamp")); verifyDataRows(response, rows("1899-12-30 21:07:32")); } @@ -490,8 +491,8 @@ public void testMaxDateTimePushedDown() throws IOException { var response = executeQuery( String.format( - "SELECT max(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); - verifySchema(response, schema("max(datetime(CAST(time0 AS STRING)))", null, "datetime")); + "SELECT max(timestamp(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(timestamp(CAST(time0 AS STRING)))", null, "timestamp")); verifyDataRows(response, rows("1900-01-01 20:36:00")); } @@ -500,8 +501,8 @@ public void testAvgDateTimePushedDown() throws IOException { var response = executeQuery( String.format( - "SELECT avg(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); - verifySchema(response, schema("avg(datetime(CAST(time0 AS STRING)))", null, "datetime")); + "SELECT avg(timestamp(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); + verifySchema(response, 
schema("avg(timestamp(CAST(time0 AS STRING)))", null, "timestamp")); verifyDataRows(response, rows("1900-01-01 03:35:00.236")); } @@ -591,13 +592,15 @@ public void testMinDateTimeInMemory() throws IOException { var response = executeQuery( String.format( - "SELECT min(datetime(CAST(time0 AS STRING)))" + "SELECT min(timestamp(CAST(time0 AS STRING)))" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); verifySchema( response, schema( - "min(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + "min(timestamp(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", + null, + "timestamp")); verifySome(response.getJSONArray("datarows"), rows("1899-12-30 21:07:32")); } @@ -606,13 +609,15 @@ public void testMaxDateTimeInMemory() throws IOException { var response = executeQuery( String.format( - "SELECT max(datetime(CAST(time0 AS STRING)))" + "SELECT max(timestamp(CAST(time0 AS STRING)))" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); verifySchema( response, schema( - "max(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + "max(timestamp(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", + null, + "timestamp")); verifySome(response.getJSONArray("datarows"), rows("1900-01-01 20:36:00")); } @@ -621,13 +626,15 @@ public void testAvgDateTimeInMemory() throws IOException { var response = executeQuery( String.format( - "SELECT avg(datetime(CAST(time0 AS STRING)))" + "SELECT avg(timestamp(CAST(time0 AS STRING)))" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); verifySchema( response, schema( - "avg(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + "avg(timestamp(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", + null, + "timestamp")); verifySome(response.getJSONArray("datarows"), rows("1900-01-01 03:35:00.236")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java 
b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java index 76600b6561..776c4de290 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java @@ -26,7 +26,7 @@ public void init() throws Exception { public void inRangeZeroToPositive() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2008-05-15 12:00:00','+00:00','+10:00')"); verifySchema( - result, schema("convert_tz('2008-05-15 12:00:00','+00:00','+10:00')", null, "datetime")); + result, schema("convert_tz('2008-05-15 12:00:00','+00:00','+10:00')", null, "timestamp")); verifyDataRows(result, rows("2008-05-15 22:00:00")); } @@ -34,7 +34,7 @@ public void inRangeZeroToPositive() throws IOException { public void inRangeNegativeZeroToPositiveZero() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 00:00:00','-00:00','+00:00')"); verifySchema( - result, schema("convert_tz('2021-05-12 00:00:00','-00:00','+00:00')", null, "datetime")); + result, schema("convert_tz('2021-05-12 00:00:00','-00:00','+00:00')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 00:00:00")); } @@ -42,7 +42,7 @@ public void inRangeNegativeZeroToPositiveZero() throws IOException { public void inRangePositiveToPositive() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 00:00:00','+10:00','+11:00')"); verifySchema( - result, schema("convert_tz('2021-05-12 00:00:00','+10:00','+11:00')", null, "datetime")); + result, schema("convert_tz('2021-05-12 00:00:00','+10:00','+11:00')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 01:00:00")); } @@ -50,7 +50,7 @@ public void inRangePositiveToPositive() throws IOException { public void inRangeNegativeToPositive() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-08:00','+09:00')"); verifySchema( - result, 
schema("convert_tz('2021-05-12 11:34:50','-08:00','+09:00')", null, "datetime")); + result, schema("convert_tz('2021-05-12 11:34:50','-08:00','+09:00')", null, "timestamp")); verifyDataRows(result, rows("2021-05-13 04:34:50")); } @@ -58,7 +58,7 @@ public void inRangeNegativeToPositive() throws IOException { public void inRangeSameTimeZone() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+09:00','+09:00')"); verifySchema( - result, schema("convert_tz('2021-05-12 11:34:50','+09:00','+09:00')", null, "datetime")); + result, schema("convert_tz('2021-05-12 11:34:50','+09:00','+09:00')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 11:34:50")); } @@ -66,7 +66,7 @@ public void inRangeSameTimeZone() throws IOException { public void inRangeTwentyFourHourTimeOffset() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-12:00','+12:00')"); verifySchema( - result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+12:00')", null, "datetime")); + result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+12:00')", null, "timestamp")); verifyDataRows(result, rows("2021-05-13 11:34:50")); } @@ -74,7 +74,7 @@ public void inRangeTwentyFourHourTimeOffset() throws IOException { public void inRangeFifteenMinuteTimeZones() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 13:00:00','+09:30','+05:45')"); verifySchema( - result, schema("convert_tz('2021-05-12 13:00:00','+09:30','+05:45')", null, "datetime")); + result, schema("convert_tz('2021-05-12 13:00:00','+09:30','+05:45')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 09:15:00")); } @@ -82,7 +82,7 @@ public void inRangeFifteenMinuteTimeZones() throws IOException { public void inRangeRandomTimes() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 13:00:00','+09:31','+05:11')"); verifySchema( - result, schema("convert_tz('2021-05-12 
13:00:00','+09:31','+05:11')", null, "datetime")); + result, schema("convert_tz('2021-05-12 13:00:00','+09:31','+05:11')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 08:40:00")); } @@ -90,7 +90,7 @@ public void inRangeRandomTimes() throws IOException { public void nullField2Under() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-30 11:34:50','-14:00','+08:00')"); verifySchema( - result, schema("convert_tz('2021-05-30 11:34:50','-14:00','+08:00')", null, "datetime")); + result, schema("convert_tz('2021-05-30 11:34:50','-14:00','+08:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -98,7 +98,7 @@ public void nullField2Under() throws IOException { public void nullField3Over() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-12:00','+14:01')"); verifySchema( - result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+14:01')", null, "datetime")); + result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+14:01')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -106,7 +106,7 @@ public void nullField3Over() throws IOException { public void inRangeMinOnPoint() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 15:00:00','-13:59','-13:59')"); verifySchema( - result, schema("convert_tz('2021-05-12 15:00:00','-13:59','-13:59')", null, "datetime")); + result, schema("convert_tz('2021-05-12 15:00:00','-13:59','-13:59')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 15:00:00")); } @@ -118,7 +118,7 @@ public void inRangeMinOnPoint() throws IOException { public void nullField3InvalidInput() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+10:0','+14:01')"); verifySchema( - result, schema("convert_tz('2021-05-12 11:34:50','+10:0','+14:01')", null, "datetime")); + result, schema("convert_tz('2021-05-12 
11:34:50','+10:0','+14:01')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -126,16 +126,16 @@ public void nullField3InvalidInput() throws IOException { public void nullField2InvalidInput() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+14:01','****')"); verifySchema( - result, schema("convert_tz('2021-05-12 11:34:50','+14:01','****')", null, "datetime")); + result, schema("convert_tz('2021-05-12 11:34:50','+14:01','****')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } - // Invalid input in the datetime field of CONVERT_TZ results in a null field. It is any input + // Invalid input in the timestamp field of CONVERT_TZ results in a null field. It is any input // which is not of the format `yyyy-MM-dd HH:mm:ss` @Test - public void nullDateTimeInvalidInput() throws IOException { + public void nulltimestampInvalidInput() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021----','+00:00','+00:00')"); - verifySchema(result, schema("convert_tz('2021----','+00:00','+00:00')", null, "datetime")); + verifySchema(result, schema("convert_tz('2021----','+00:00','+00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -143,7 +143,7 @@ public void nullDateTimeInvalidInput() throws IOException { public void nullDateTimeInvalidDateValueFebruary() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-02-30 10:00:00','+00:00','+00:00')"); verifySchema( - result, schema("convert_tz('2021-02-30 10:00:00','+00:00','+00:00')", null, "datetime")); + result, schema("convert_tz('2021-02-30 10:00:00','+00:00','+00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -151,7 +151,7 @@ public void nullDateTimeInvalidDateValueFebruary() throws IOException { public void nullDateTimeInvalidDateValueApril() throws IOException { var result = executeJdbcRequest("SELECT 
convert_tz('2021-04-31 10:00:00','+00:00','+00:00')"); verifySchema( - result, schema("convert_tz('2021-04-31 10:00:00','+00:00','+00:00')", null, "datetime")); + result, schema("convert_tz('2021-04-31 10:00:00','+00:00','+00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -159,7 +159,7 @@ public void nullDateTimeInvalidDateValueApril() throws IOException { public void nullDateTimeInvalidDateValueMonth() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-13-03 10:00:00','+00:00','+00:00')"); verifySchema( - result, schema("convert_tz('2021-13-03 10:00:00','+00:00','+00:00')", null, "datetime")); + result, schema("convert_tz('2021-13-03 10:00:00','+00:00','+00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java index 432daef82f..af3d81e374 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java @@ -105,32 +105,6 @@ public static Iterable compareTwoTimes() { $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareTwoDateTimes() { - return Arrays.asList( - $$( - $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), - $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), - $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), - $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), - $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), - $("DATETIME('1984-12-15 
22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), - $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), - $( - "DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", - "lte3", - false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimestamps() { return Arrays.asList( @@ -168,22 +142,6 @@ public static Iterable compareEqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), @@ -194,37 +152,6 @@ public static Iterable compareEqTimestampWithOtherTypes() { $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", 
"t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareEqDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateWithOtherTypes() { var today = LocalDate.now().toString(); @@ -234,10 +161,6 @@ public static Iterable compareEqDateWithOtherTypes() { $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", 
false), $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), @@ -249,10 +172,6 @@ public static Iterable compareEqTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), @@ -268,22 +187,6 @@ public static Iterable compareNeqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", - "ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), @@ -294,37 +197,6 @@ public static Iterable compareNeqTimestampWithOtherTypes() { $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareNeqDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return 
Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateWithOtherTypes() { var today = LocalDate.now().toString(); @@ -334,10 +206,6 @@ public static Iterable compareNeqDateWithOtherTypes() { $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), $("DATE('" + today + "') != 
TIME('00:00:00')", "d_t_f", false), @@ -349,10 +217,6 @@ public static Iterable compareNeqTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), @@ -368,22 +232,6 @@ public static Iterable compareLtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", - "ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), @@ -394,37 +242,6 @@ public static Iterable compareLtTimestampWithOtherTypes() { $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareLtDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') < 
DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateWithOtherTypes() { return Arrays.asList( @@ -433,10 +250,6 @@ public static Iterable compareLtDateWithOtherTypes() { $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), @@ -448,10 +261,6 @@ public static Iterable compareLtTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') < DATETIME('3077-04-12 
09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), @@ -467,22 +276,6 @@ public static Iterable compareGtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), @@ -493,37 +286,6 @@ public static Iterable compareGtTimestampWithOtherTypes() { $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareGtDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", - 
"ts_dt_f", - false), - $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), - $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateWithOtherTypes() { return Arrays.asList( @@ -532,10 +294,6 @@ public static Iterable compareGtDateWithOtherTypes() { $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), - $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), @@ -547,10 +305,6 @@ public static Iterable compareGtTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') > 
TIME('10:20:30')", "dt_t_f", false), $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), @@ -566,22 +320,6 @@ public static Iterable compareLteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), @@ -592,37 +330,6 @@ public static Iterable compareLteTimestampWithOtherTypes() { $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareLteDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') 
<= DATE('1984-04-12')", "dt_d_f", false), - $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), - $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateWithOtherTypes() { return Arrays.asList( @@ -631,10 +338,6 @@ public static Iterable compareLteDateWithOtherTypes() { $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), @@ -646,10 +349,6 @@ public static Iterable compareLteTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), 
$("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), @@ -665,22 +364,6 @@ public static Iterable compareGteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), @@ -691,37 +374,6 @@ public static Iterable compareGteTimestampWithOtherTypes() { $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareGteDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", 
true), - $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateWithOtherTypes() { return Arrays.asList( @@ -730,10 +382,6 @@ public static Iterable compareGteDateWithOtherTypes() { $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), - $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), @@ -745,10 +393,6 @@ public static Iterable compareGteTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java 
b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java index 33eb8b693f..0ec77f9f31 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java @@ -94,15 +94,15 @@ public void testAddDateWithDays() throws IOException { verifyDataRows(result, rows("2020-09-17")); result = executeQuery("select adddate(timestamp('2020-09-16 17:30:00'), 1)"); - verifySchema(result, schema("adddate(timestamp('2020-09-16 17:30:00'), 1)", null, "datetime")); + verifySchema(result, schema("adddate(timestamp('2020-09-16 17:30:00'), 1)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 17:30:00")); - result = executeQuery("select adddate(DATETIME('2020-09-16 07:40:00'), 1)"); - verifySchema(result, schema("adddate(DATETIME('2020-09-16 07:40:00'), 1)", null, "datetime")); + result = executeQuery("select adddate(TIMESTAMP('2020-09-16 07:40:00'), 1)"); + verifySchema(result, schema("adddate(TIMESTAMP('2020-09-16 07:40:00'), 1)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 07:40:00")); result = executeQuery("select adddate(TIME('07:40:00'), 0)"); - verifySchema(result, schema("adddate(TIME('07:40:00'), 0)", null, "datetime")); + verifySchema(result, schema("adddate(TIME('07:40:00'), 0)", null, "timestamp")); verifyDataRows(result, rows(LocalDate.now() + " 07:40:00")); } @@ -112,25 +112,19 @@ public void testAddDateWithInterval() throws IOException { executeQuery("select adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, rows("2020-09-17 17:30:00")); - - result = executeQuery("select adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema( - result, - schema("adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("adddate(timestamp('2020-09-16 
17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select adddate(date('2020-09-16'), interval 1 day)"); - verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 00:00:00")); result = executeQuery("select adddate(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 hour)", null, "timestamp")); verifyDataRows(result, rows("2020-09-16 01:00:00")); result = executeQuery("select adddate(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 day)", null, "timestamp")); verifyDataRows( result, rows( @@ -141,7 +135,7 @@ public void testAddDateWithInterval() throws IOException { .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select adddate(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 hour)", null, "timestamp")); verifyDataRows( result, rows( @@ -157,25 +151,26 @@ public void testDateAdd() throws IOException { executeQuery("select date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 17:30:00")); - result = executeQuery("select date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); + 
result = executeQuery("select date_add(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("date_add(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select date_add(date('2020-09-16'), interval 1 day)"); - verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 00:00:00")); result = executeQuery("select date_add(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema( + result, schema("date_add(date('2020-09-16'), interval 1 hour)", null, "timestamp")); verifyDataRows(result, rows("2020-09-16 01:00:00")); result = executeQuery("select date_add(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 day)", null, "timestamp")); verifyDataRows( result, rows( @@ -186,7 +181,7 @@ public void testDateAdd() throws IOException { .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select date_add(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 hour)", null, "timestamp")); verifyDataRows( result, rows( @@ -199,7 +194,7 @@ public void testDateAdd() throws IOException { executeQuery( String.format("SELECT DATE_ADD(birthdate, INTERVAL 1 YEAR) FROM %s", TEST_INDEX_BANK)); - verifySchema(result, schema("DATE_ADD(birthdate, INTERVAL 1 YEAR)", null, 
"datetime")); + verifySchema(result, schema("DATE_ADD(birthdate, INTERVAL 1 YEAR)", null, "timestamp")); verifyDataRows( result, rows("2018-10-23 00:00:00"), @@ -217,25 +212,26 @@ public void testDateSub() throws IOException { executeQuery("select date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 17:30:00")); - result = executeQuery("select date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); + result = executeQuery("select date_sub(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("date_sub(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select date_sub(date('2020-09-16'), interval 1 day)"); - verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 00:00:00")); result = executeQuery("select date_sub(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema( + result, schema("date_sub(date('2020-09-16'), interval 1 hour)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 23:00:00")); result = executeQuery("select date_sub(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 day)", null, "timestamp")); verifyDataRows( result, rows( @@ -246,7 +242,7 @@ public 
void testDateSub() throws IOException { .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select date_sub(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 hour)", null, "timestamp")); verifyDataRows( result, rows( @@ -314,11 +310,11 @@ public void testDayOfMonthAliasesReturnTheSameResults() throws IOException { result1 = executeQuery( String.format( - "SELECT dayofmonth(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT dayofmonth(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT day_of_month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT day_of_month(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1 = @@ -371,11 +367,11 @@ public void testDayOfWeekAliasesReturnTheSameResults() throws IOException { result1 = executeQuery( String.format( - "SELECT dayofweek(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT dayofweek(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT day_of_week(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT day_of_week(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1 = @@ -409,8 +405,8 @@ public void testDayOfYearWithUnderscores() throws IOException { verifySchema(result, schema("day_of_year(date('2020-09-16'))", null, "integer")); verifyDataRows(result, rows(260)); - result = executeQuery("select day_of_year(datetime('2020-09-16 00:00:00'))"); - verifySchema(result, schema("day_of_year(datetime('2020-09-16 00:00:00'))", null, "integer")); + result = 
executeQuery("select day_of_year(timestamp('2020-09-16 00:00:00'))"); + verifySchema(result, schema("day_of_year(timestamp('2020-09-16 00:00:00'))", null, "integer")); verifyDataRows(result, rows(260)); result = executeQuery("select day_of_year(timestamp('2020-09-16 00:00:00'))"); @@ -436,11 +432,11 @@ public void testDayOfYearAlternateSyntaxesReturnTheSameResults() throws IOExcept result1 = executeQuery( String.format( - "SELECT dayofyear(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT dayofyear(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT day_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT day_of_year(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1 = @@ -489,10 +485,6 @@ public void testHourOfDayWithUnderscores() throws IOException { verifySchema(result, schema("hour_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(17)); - result = executeQuery("select hour_of_day(datetime('2020-09-16 17:30:00'))"); - verifySchema(result, schema("hour_of_day(datetime('2020-09-16 17:30:00'))", null, "integer")); - verifyDataRows(result, rows(17)); - result = executeQuery("select hour_of_day(time('17:30:00'))"); verifySchema(result, schema("hour_of_day(time('17:30:00'))", null, "integer")); verifyDataRows(result, rows(17)); @@ -511,7 +503,7 @@ public void testExtractWithDatetime() throws IOException { JSONObject datetimeResult = executeQuery( String.format( - "SELECT extract(DAY_SECOND FROM datetime(cast(datetime0 AS STRING))) FROM %s LIMIT" + "SELECT extract(DAY_SECOND FROM timestamp(cast(datetime0 AS STRING))) FROM %s LIMIT" + " 1", TEST_INDEX_CALCS)); verifyDataRows(datetimeResult, rows(9101735)); @@ -561,11 +553,11 @@ public void testHourFunctionAliasesReturnTheSameResults() throws IOException { result1 = 
executeQuery( String.format( - "SELECT hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT hour(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT hour_of_day(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT hour_of_day(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1 = @@ -663,8 +655,9 @@ public void testMinuteOfDay() throws IOException { result, schema("minute_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(1050)); - result = executeQuery("select minute_of_day(datetime('2020-09-16 17:30:00'))"); - verifySchema(result, schema("minute_of_day(datetime('2020-09-16 17:30:00'))", null, "integer")); + result = executeQuery("select minute_of_day(timestamp('2020-09-16 17:30:00'))"); + verifySchema( + result, schema("minute_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(1050)); result = executeQuery("select minute_of_day(time('17:30:00'))"); @@ -710,11 +703,11 @@ public void testMinuteFunctionAliasesReturnTheSameResults() throws IOException { result1 = executeQuery( String.format( - "SELECT minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT minute(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT minute_of_hour(datetime(CAST(time0 AS STRING))) FROM %s", + "SELECT minute_of_hour(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); @@ -750,8 +743,9 @@ public void testMonthOfYearTypes() throws IOException { verifySchema(result, schema("month_of_year(date('2020-09-16'))", null, "integer")); verifyDataRows(result, rows(9)); - result = executeQuery("select month_of_year(datetime('2020-09-16 00:00:00'))"); - 
verifySchema(result, schema("month_of_year(datetime('2020-09-16 00:00:00'))", null, "integer")); + result = executeQuery("select month_of_year(timestamp('2020-09-16 00:00:00'))"); + verifySchema( + result, schema("month_of_year(timestamp('2020-09-16 00:00:00'))", null, "integer")); verifyDataRows(result, rows(9)); result = executeQuery("select month_of_year(timestamp('2020-09-16 00:00:00'))"); @@ -778,11 +772,12 @@ public void testMonthAlternateSyntaxesReturnTheSameResults() throws IOException result1 = executeQuery( String.format( - "SELECT month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT month(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT month_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT month_of_year(timestamp(CAST(time0 AS STRING))) FROM %s", + TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1 = @@ -876,11 +871,11 @@ public void testSecondFunctionAliasesReturnTheSameResults() throws IOException { result1 = executeQuery( String.format( - "SELECT second(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT second(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT second_of_minute(datetime(CAST(time0 AS STRING))) FROM %s", + "SELECT second_of_minute(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); @@ -934,15 +929,15 @@ public void testSubDateWithDays() throws IOException { verifyDataRows(result, rows("2020-09-15")); result = executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), 1)"); - verifySchema(result, schema("subdate(timestamp('2020-09-16 17:30:00'), 1)", null, "datetime")); + verifySchema(result, schema("subdate(timestamp('2020-09-16 17:30:00'), 1)", null, "timestamp")); verifyDataRows(result, 
rows("2020-09-15 17:30:00")); - result = executeQuery("select subdate(DATETIME('2020-09-16 07:40:00'), 1)"); - verifySchema(result, schema("subdate(DATETIME('2020-09-16 07:40:00'), 1)", null, "datetime")); + result = executeQuery("select subdate(TIMESTAMP('2020-09-16 07:40:00'), 1)"); + verifySchema(result, schema("subdate(TIMESTAMP('2020-09-16 07:40:00'), 1)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 07:40:00")); result = executeQuery("select subdate(TIME('07:40:00'), 0)"); - verifySchema(result, schema("subdate(TIME('07:40:00'), 0)", null, "datetime")); + verifySchema(result, schema("subdate(TIME('07:40:00'), 0)", null, "timestamp")); verifyDataRows(result, rows(LocalDate.now() + " 07:40:00")); } @@ -952,25 +947,25 @@ public void testSubDateWithInterval() throws IOException { executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 17:30:00")); - result = executeQuery("select subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); + result = executeQuery("select subdate(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("subdate(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(date('2020-09-16'), interval 1 day)"); - verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 00:00:00")); result = executeQuery("select subdate(date('2020-09-16'), interval 1 hour)"); - 
verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 hour)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 23:00:00")); result = executeQuery("select subdate(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 day)", null, "timestamp")); verifyDataRows( result, rows( @@ -981,7 +976,7 @@ public void testSubDateWithInterval() throws IOException { .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select subdate(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 hour)", null, "timestamp")); verifyDataRows( result, rows( @@ -1045,7 +1040,7 @@ public void testToSeconds() throws IOException { result = executeQuery( String.format( - "SELECT to_seconds(datetime(cast(datetime0 AS string))) FROM %s LIMIT 2", + "SELECT to_seconds(timestamp(cast(datetime0 AS string))) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(63256587455L), rows(63258064234L)); @@ -1142,7 +1137,7 @@ public void testWeekAlternateSyntaxesReturnTheSameResults() throws IOException { result1.getJSONArray("datarows").similar(result3.getJSONArray("datarows")); compareWeekResults("date0", TEST_INDEX_CALCS); - compareWeekResults("datetime(CAST(time0 AS STRING))", TEST_INDEX_CALCS); + compareWeekResults("timestamp(CAST(time0 AS STRING))", TEST_INDEX_CALCS); compareWeekResults("CAST(time0 AS STRING)", TEST_INDEX_CALCS); compareWeekResults("datetime0", TEST_INDEX_CALCS); } @@ -1215,8 +1210,8 @@ public void testFromUnixTime() throws IOException { + "FROM_UNIXTIME(1662601316, '%T') f3"); verifySchema( result, - schema("FROM_UNIXTIME(200300400)", 
"f1", "datetime"), - schema("FROM_UNIXTIME(12224.12)", "f2", "datetime"), + schema("FROM_UNIXTIME(200300400)", "f1", "timestamp"), + schema("FROM_UNIXTIME(12224.12)", "f2", "timestamp"), schema("FROM_UNIXTIME(1662601316, '%T')", "f3", "keyword")); verifySome( result.getJSONArray("datarows"), @@ -1272,21 +1267,21 @@ public void testAddTime() throws IOException { + " ADDTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' + 0`," + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' + '23:59:59'`," + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' + '00:05:42'`," - + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS" + + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), TIMESTAMP('1961-04-12 09:07:00')) AS" + " `'15:42:13' + '09:07:00'`"); verifySchema( result, - schema("ADDTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' + 0", "datetime"), + schema("ADDTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' + 0", "timestamp"), schema("ADDTIME(TIME('23:59:59'), DATE('2004-01-01'))", "'23:59:59' + 0", "time"), schema( "ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))", "'2004-01-01' + '23:59:59'", - "datetime"), + "timestamp"), schema("ADDTIME(TIME('10:20:30'), TIME('00:05:42'))", "'10:20:30' + '00:05:42'", "time"), schema( - "ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", + "ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), TIMESTAMP('1961-04-12 09:07:00'))", "'15:42:13' + '09:07:00'", - "datetime")); + "timestamp")); verifyDataRows( result, rows( @@ -1305,21 +1300,21 @@ public void testSubTime() throws IOException { + " SUBTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' - 0`," + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' - '23:59:59'`," + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' - '00:05:42'`," - + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS" + + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), 
TIMESTAMP('1961-04-12 09:07:00')) AS" + " `'15:42:13' - '09:07:00'`"); verifySchema( result, - schema("SUBTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' - 0", "datetime"), + schema("SUBTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' - 0", "timestamp"), schema("SUBTIME(TIME('23:59:59'), DATE('2004-01-01'))", "'23:59:59' - 0", "time"), schema( "SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))", "'2004-01-01' - '23:59:59'", - "datetime"), + "timestamp"), schema("SUBTIME(TIME('10:20:30'), TIME('00:05:42'))", "'10:20:30' - '00:05:42'", "time"), schema( - "SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", + "SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), TIMESTAMP('1961-04-12 09:07:00'))", "'15:42:13' - '09:07:00'", - "datetime")); + "timestamp")); verifyDataRows( result, rows( @@ -1336,7 +1331,7 @@ public void testDateDiff() throws IOException { "SELECT DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')) AS" + " `'2000-01-02' - '2000-01-01'`, DATEDIFF(DATE('2001-02-01')," + " TIMESTAMP('2004-01-01 00:00:00')) AS `'2001-02-01' - '2004-01-01'`," - + " DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30')) AS" + + " DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), TIMESTAMP('2002-02-01 14:25:30')) AS" + " `'2004-01-01' - '2002-02-01'`, DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) AS" + " `today - today`"); verifySchema( @@ -1350,7 +1345,7 @@ public void testDateDiff() throws IOException { "'2001-02-01' - '2004-01-01'", "long"), schema( - "DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))", + "DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), TIMESTAMP('2002-02-01 14:25:30'))", "'2004-01-01' - '2002-02-01'", "long"), schema("DATEDIFF(TIME('23:59:59'), TIME('00:00:00'))", "today - today", "long")); @@ -1446,12 +1441,12 @@ public void testDateBracket() throws IOException { verifyDataRows(result, rows("2020-09-16")); } - private void compareBrackets(String query1, String 
query2, String datetime) throws IOException { - JSONObject result1 = executeQuery("select " + query1 + " '" + datetime + "'"); - JSONObject result2 = executeQuery("select {" + query2 + " '" + datetime + "'}"); + private void compareBrackets(String query1, String query2, String timestamp) throws IOException { + JSONObject result1 = executeQuery("select " + query1 + " '" + timestamp + "'"); + JSONObject result2 = executeQuery("select {" + query2 + " '" + timestamp + "'}"); - verifyDataRows(result1, rows(datetime)); - verifyDataRows(result2, rows(datetime)); + verifyDataRows(result1, rows(timestamp)); + verifyDataRows(result2, rows(timestamp)); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java index 8ffa1df8f3..490272d950 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java @@ -28,7 +28,7 @@ public void inRangeZeroToStringTZ() throws IOException { executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')"); verifySchema( result, - schema("DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')", null, "datetime")); + schema("DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')", null, "timestamp")); verifyDataRows(result, rows("2008-12-24 21:30:00")); } @@ -36,7 +36,7 @@ public void inRangeZeroToStringTZ() throws IOException { public void inRangeZeroToPositive() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00+00:00', '+01:00')"); verifySchema( - result, schema("DATETIME('2008-12-25 05:30:00+00:00', '+01:00')", null, "datetime")); + result, schema("DATETIME('2008-12-25 05:30:00+00:00', '+01:00')", null, "timestamp")); verifyDataRows(result, rows("2008-12-25 06:30:00")); } @@ -44,7 +44,7 @@ public void inRangeZeroToPositive() throws 
IOException { public void inRangeNegativeToPositive() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00-05:00', '+05:00')"); verifySchema( - result, schema("DATETIME('2008-12-25 05:30:00-05:00', '+05:00')", null, "datetime")); + result, schema("DATETIME('2008-12-25 05:30:00-05:00', '+05:00')", null, "timestamp")); verifyDataRows(result, rows("2008-12-25 15:30:00")); } @@ -52,7 +52,7 @@ public void inRangeNegativeToPositive() throws IOException { public void inRangeTwentyHourOffset() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2004-02-28 23:00:00-10:00', '+10:00')"); verifySchema( - result, schema("DATETIME('2004-02-28 23:00:00-10:00', '+10:00')", null, "datetime")); + result, schema("DATETIME('2004-02-28 23:00:00-10:00', '+10:00')", null, "timestamp")); verifyDataRows(result, rows("2004-02-29 19:00:00")); } @@ -60,21 +60,21 @@ public void inRangeTwentyHourOffset() throws IOException { public void inRangeYearChange() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-10:00')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-10:00')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-10:00')", null, "timestamp")); verifyDataRows(result, rows("2007-12-31 06:00:00")); } @Test public void inRangeZeroNoToTZ() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00')"); - verifySchema(result, schema("DATETIME('2008-01-01 02:00:00+10:00')", null, "datetime")); + verifySchema(result, schema("DATETIME('2008-01-01 02:00:00+10:00')", null, "timestamp")); verifyDataRows(result, rows("2008-01-01 02:00:00")); } @Test public void inRangeZeroNoTZ() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00')"); - verifySchema(result, schema("DATETIME('2008-01-01 02:00:00')", null, "datetime")); + verifySchema(result, 
schema("DATETIME('2008-01-01 02:00:00')", null, "timestamp")); verifyDataRows(result, rows("2008-01-01 02:00:00")); } @@ -82,7 +82,7 @@ public void inRangeZeroNoTZ() throws IOException { public void inRangeZeroDayConvert() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+12:00', '-12:00')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+12:00', '-12:00')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+12:00', '-12:00')", null, "timestamp")); verifyDataRows(result, rows("2007-12-31 02:00:00")); } @@ -90,7 +90,7 @@ public void inRangeZeroDayConvert() throws IOException { public void inRangeJustInRangeNegative() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-13:59')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-13:59')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-13:59')", null, "timestamp")); verifyDataRows(result, rows("2007-12-31 02:01:00")); } @@ -98,7 +98,7 @@ public void inRangeJustInRangeNegative() throws IOException { public void inRangeJustInRangePositive() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+14:00', '-10:00')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+14:00', '-10:00')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+14:00', '-10:00')", null, "timestamp")); verifyDataRows(result, rows("2007-12-31 02:00:00")); } @@ -106,7 +106,7 @@ public void inRangeJustInRangePositive() throws IOException { public void nullField3Under() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-14:01')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-14:01')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-14:01')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); 
} @@ -114,28 +114,28 @@ public void nullField3Under() throws IOException { public void nullField1Over() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+14:01', '-10:00')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+14:01', '-10:00')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+14:01', '-10:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2021-02-30 10:00:00')"); - verifySchema(result, schema("DATETIME('2021-02-30 10:00:00')", null, "datetime")); + verifySchema(result, schema("DATETIME('2021-02-30 10:00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2021-04-31 10:00:00')"); - verifySchema(result, schema("DATETIME('2021-04-31 10:00:00')", null, "datetime")); + verifySchema(result, schema("DATETIME('2021-04-31 10:00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueMonth() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2021-13-03 10:00:00')"); - verifySchema(result, schema("DATETIME('2021-13-03 10:00:00')", null, "datetime")); + verifySchema(result, schema("DATETIME('2021-13-03 10:00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java index d2798728a1..7129d058c0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java @@ -35,9 +35,8 @@ public void typeof_sql_types() { "SELECT" + " 
typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," + " typeof(CAST('09:07:00' AS TIME))," - + " typeof(CAST('1961-04-12' AS DATE))," - + " typeof(DATETIME('1961-04-12 09:07:00'))"); - verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); + + " typeof(CAST('1961-04-12' AS DATE))"); + verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE")); } @Test diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java index d0a924c494..7e6bee77c2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java @@ -375,7 +375,6 @@ public static boolean isDateTypeCompatible(ExprType exprType) { } switch ((ExprCoreType) exprType) { case TIMESTAMP: - case DATETIME: case DATE: case TIME: return true; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java index 22c2ece4a7..3341e01ab2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java @@ -8,7 +8,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -20,7 +19,6 @@ import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER; 
import static org.opensearch.sql.utils.DateTimeFormatters.STRICT_HOUR_MINUTE_SECOND_FORMATTER; import static org.opensearch.sql.utils.DateTimeFormatters.STRICT_YEAR_MONTH_DAY_FORMATTER; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -29,6 +27,7 @@ import java.time.Instant; import java.time.LocalDate; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; @@ -132,8 +131,6 @@ public void extendTypeMapping(Map typeMapping) { .put( OpenSearchDateType.of(TIMESTAMP), OpenSearchExprValueFactory::createOpenSearchDateType) - .put( - OpenSearchDateType.of(DATETIME), OpenSearchExprValueFactory::createOpenSearchDateType) .put( OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip), (c, dt) -> new OpenSearchExprIpValue(c.stringValue())) @@ -241,11 +238,12 @@ private static ExprValue parseDateTimeString(String value, OpenSearchDateType da ZonedDateTime zonedDateTime = DateFormatters.from(accessor); switch (returnFormat) { case TIME: - return new ExprTimeValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalTime()); + return new ExprTimeValue(zonedDateTime.withZoneSameLocal(ZoneOffset.UTC).toLocalTime()); case DATE: - return new ExprDateValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalDate()); + return new ExprDateValue(zonedDateTime.withZoneSameLocal(ZoneOffset.UTC).toLocalDate()); default: - return new ExprTimestampValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toInstant()); + return new ExprTimestampValue( + zonedDateTime.withZoneSameLocal(ZoneOffset.UTC).toInstant()); } } catch (IllegalArgumentException ignored) { // nothing to do, try another format @@ -291,9 +289,9 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) Instant instant = Instant.ofEpochMilli(epochMillis); 
switch ((ExprCoreType) returnFormat) { case TIME: - return new ExprTimeValue(LocalTime.from(instant.atZone(UTC_ZONE_ID))); + return new ExprTimeValue(LocalTime.from(instant.atZone(ZoneOffset.UTC))); case DATE: - return new ExprDateValue(LocalDate.ofInstant(instant, UTC_ZONE_ID)); + return new ExprDateValue(LocalDate.ofInstant(instant, ZoneOffset.UTC)); default: return new ExprTimestampValue(instant); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java index 7e7b2e959a..06cca5dcc6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java @@ -49,7 +49,6 @@ public Object execute() { // Can't get timestamp from `ExprTimeValue` return MILLIS.between(LocalTime.MIN, expr.timeValue()); case DATE: - case DATETIME: case TIMESTAMP: return expr.timestampValue().toEpochMilli(); default: diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java index 4485626742..ff66ec425a 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java @@ -6,7 +6,6 @@ package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static 
org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; @@ -66,7 +65,7 @@ private CompositeValuesSourceBuilder buildCompositeValuesSourceBuilder( .missingOrder(missingOrder) .order(sortOrder); // Time types values are converted to LONG in ExpressionAggregationScript::execute - if (List.of(TIMESTAMP, TIME, DATE, DATETIME).contains(expr.getDelegated().type())) { + if (List.of(TIMESTAMP, TIME, DATE).contains(expr.getDelegated().type())) { sourceBuilder.userValuetypeHint(ValueType.LONG); } return helper.build(expr.getDelegated(), sourceBuilder::field, sourceBuilder::script); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java index 753c2bbbc7..11533c754e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java @@ -14,7 +14,6 @@ import org.opensearch.sql.data.model.ExprBooleanValue; import org.opensearch.sql.data.model.ExprByteValue; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprFloatValue; import org.opensearch.sql.data.model.ExprIntegerValue; @@ -225,15 +224,6 @@ private ExprValue cast(FunctionExpression castFunction) { return new ExprTimeValue(expr.valueOf().timeValue()); } }) - .put( - BuiltinFunctionName.CAST_TO_DATETIME.getName(), - expr -> { - if (expr.type().equals(ExprCoreType.STRING)) { - return new ExprDatetimeValue(expr.valueOf().stringValue()); - } else { - return new ExprDatetimeValue(expr.valueOf().datetimeValue()); - } - }) .put( BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), expr -> { diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java index a9511f8c0b..34738224e7 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java @@ -12,7 +12,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import static org.opensearch.sql.opensearch.data.type.OpenSearchDateType.SUPPORTED_NAMED_DATETIME_FORMATS; @@ -43,14 +42,14 @@ class OpenSearchDateTypeTest { private static final String timeFormatString = "hourMinuteSecond"; - private static final String datetimeFormatString = "basic_date_time"; + private static final String timestampFormatString = "basic_date_time"; private static final OpenSearchDateType defaultDateType = OpenSearchDateType.of(defaultFormatString); private static final OpenSearchDateType dateDateType = OpenSearchDateType.of(dateFormatString); private static final OpenSearchDateType timeDateType = OpenSearchDateType.of(timeFormatString); private static final OpenSearchDateType datetimeDateType = - OpenSearchDateType.of(datetimeFormatString); + OpenSearchDateType.of(timestampFormatString); @Test public void isCompatible() { @@ -59,25 +58,16 @@ public void isCompatible() { () -> assertTrue(TIMESTAMP.isCompatible(defaultDateType)), () -> assertTrue(TIMESTAMP.isCompatible(dateDateType)), () -> assertTrue(TIMESTAMP.isCompatible(timeDateType)), - () -> assertTrue(TIMESTAMP.isCompatible(datetimeDateType)), - - // datetime - () -> 
assertFalse(DATETIME.isCompatible(defaultDateType)), - () -> assertTrue(DATETIME.isCompatible(dateDateType)), - () -> assertTrue(DATETIME.isCompatible(timeDateType)), - () -> assertFalse(DATETIME.isCompatible(datetimeDateType)), // time type () -> assertFalse(TIME.isCompatible(defaultDateType)), () -> assertFalse(TIME.isCompatible(dateDateType)), () -> assertTrue(TIME.isCompatible(timeDateType)), - () -> assertFalse(TIME.isCompatible(datetimeDateType)), // date type () -> assertFalse(DATE.isCompatible(defaultDateType)), () -> assertTrue(DATE.isCompatible(dateDateType)), - () -> assertFalse(DATE.isCompatible(timeDateType)), - () -> assertFalse(DATE.isCompatible(datetimeDateType))); + () -> assertFalse(DATE.isCompatible(timeDateType))); } // `typeName` and `legacyTypeName` return the same thing for date objects: @@ -88,8 +78,7 @@ public void check_typeName() { // always use the MappingType of "DATE" () -> assertEquals("DATE", defaultDateType.typeName()), () -> assertEquals("DATE", timeDateType.typeName()), - () -> assertEquals("DATE", dateDateType.typeName()), - () -> assertEquals("DATE", datetimeDateType.typeName())); + () -> assertEquals("DATE", dateDateType.typeName())); } @Test @@ -98,8 +87,7 @@ public void check_legacyTypeName() { // always use the legacy "DATE" type () -> assertEquals("DATE", defaultDateType.legacyTypeName()), () -> assertEquals("DATE", timeDateType.legacyTypeName()), - () -> assertEquals("DATE", dateDateType.legacyTypeName()), - () -> assertEquals("DATE", datetimeDateType.legacyTypeName())); + () -> assertEquals("DATE", dateDateType.legacyTypeName())); } @Test @@ -108,8 +96,7 @@ public void check_exprTypeName() { // exprType changes based on type (no datetime): () -> assertEquals(TIMESTAMP, defaultDateType.getExprType()), () -> assertEquals(TIME, timeDateType.getExprType()), - () -> assertEquals(DATE, dateDateType.getExprType()), - () -> assertEquals(TIMESTAMP, datetimeDateType.getExprType())); + () -> assertEquals(DATE, 
dateDateType.getExprType())); } private static Stream getAllSupportedFormats() { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java index bfc06b94c0..83e26f85e4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java @@ -24,7 +24,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -34,7 +33,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -42,6 +40,7 @@ import java.time.Instant; import java.time.LocalDate; import java.time.LocalTime; +import java.time.ZoneOffset; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -50,7 +49,6 @@ import org.junit.jupiter.api.Test; import org.opensearch.sql.data.model.ExprCollectionValue; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprTupleValue; @@ 
-72,7 +70,6 @@ class OpenSearchExprValueFactoryTest { .put("doubleV", OpenSearchDataType.of(DOUBLE)) .put("stringV", OpenSearchDataType.of(STRING)) .put("dateV", OpenSearchDateType.of(DATE)) - .put("datetimeV", OpenSearchDateType.of(DATETIME)) .put("timeV", OpenSearchDateType.of(TIME)) .put("timestampV", OpenSearchDateType.of(TIMESTAMP)) .put("datetimeDefaultV", OpenSearchDateType.of()) @@ -248,7 +245,7 @@ public void constructDates() { () -> assertEquals( new ExprDateValue( - LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), UTC_ZONE_ID)), + LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), ZoneOffset.UTC)), constructFromObject("dateV", 450576000000L)), () -> assertEquals( @@ -270,7 +267,7 @@ public void constructTimes() { () -> assertEquals( new ExprTimeValue( - LocalTime.from(Instant.ofEpochMilli(1420070400001L).atZone(UTC_ZONE_ID))), + LocalTime.from(Instant.ofEpochMilli(1420070400001L).atZone(ZoneOffset.UTC))), constructFromObject("timeV", 1420070400001L)), () -> assertEquals( @@ -337,14 +334,6 @@ public void constructDatetime() { assertEquals( new ExprTimestampValue("2015-01-01 12:10:30"), constructFromObject("timestampV", "2015-01-01 12:10:30")), - () -> - assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeV", "2015-01-01 12:10:30")), - () -> - assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeDefaultV", "2015-01-01 12:10:30")), () -> assertEquals( new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), @@ -366,7 +355,7 @@ public void constructDatetime() { @Test public void constructDatetime_fromCustomFormat() { assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), + new ExprTimestampValue("2015-01-01 12:10:30"), constructFromObject("customFormatV", "2015-01-01-12-10-30")); IllegalArgumentException exception = @@ -378,11 +367,11 @@ public void constructDatetime_fromCustomFormat() { exception.getMessage()); assertEquals( - new 
ExprDatetimeValue("2015-01-01 12:10:30"), + new ExprTimestampValue("2015-01-01 12:10:30"), constructFromObject("customAndEpochMillisV", "2015-01-01 12:10:30")); assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), + new ExprTimestampValue("2015-01-01 12:10:30"), constructFromObject("customAndEpochMillisV", "2015-01-01-12-10-30")); } @@ -626,7 +615,7 @@ public void constructBinaryArrayReturnsFirstIndex() { @Test public void constructArrayOfCustomEpochMillisReturnsFirstIndex() { assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), + new ExprTimestampValue("2015-01-01 12:10:30"), tupleValue("{\"customAndEpochMillisV\":[\"2015-01-01 12:10:30\",\"1999-11-09 01:09:44\"]}") .get("customAndEpochMillisV")); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java index 6485dce124..1bb988dacd 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java @@ -13,7 +13,6 @@ import static org.mockito.Mockito.doAnswer; import static org.opensearch.sql.common.utils.StringUtils.format; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -150,20 +149,6 @@ void should_build_type_mapping_for_field_reference() { map("name", OpenSearchDataType.of(STRING)))); } - @Test - void should_build_type_mapping_for_datetime_type() { - assertThat( - buildTypeMapping( - Arrays.asList( - named( - "avg(datetime)", - new 
AvgAggregator(Arrays.asList(ref("datetime", DATETIME)), DATETIME))), - Arrays.asList(named("datetime", ref("datetime", DATETIME)))), - containsInAnyOrder( - map("avg(datetime)", OpenSearchDateType.of(DATETIME)), - map("datetime", OpenSearchDateType.of(DATETIME)))); - } - @Test void should_build_type_mapping_for_timestamp_type() { assertThat( diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java index 520e301301..6d90cce704 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java @@ -34,7 +34,6 @@ import org.opensearch.search.lookup.LeafSearchLookup; import org.opensearch.search.lookup.SearchLookup; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.Expression; @@ -113,14 +112,6 @@ void can_execute_expression_interpret_dates_for_aggregation() { .shouldMatch(new ExprDateValue(LocalDate.of(1961, 4, 12)).timestampValue().toEpochMilli()); } - @Test - void can_execute_expression_interpret_datetimes_for_aggregation() { - assertThat() - .docValues("datetime", "1984-03-17 22:16:42") - .evaluate(DSL.datetime(ref("datetime", STRING))) - .shouldMatch(new ExprDatetimeValue("1984-03-17 22:16:42").timestampValue().toEpochMilli()); - } - @Test void can_execute_expression_interpret_times_for_aggregation() { assertThat() diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java index d11d7da2fe..4250b3297f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java @@ -137,7 +137,7 @@ void should_build_bucket_with_parse_expression() { @ParameterizedTest(name = "{0}") @EnumSource( value = ExprCoreType.class, - names = {"TIMESTAMP", "TIME", "DATE", "DATETIME"}) + names = {"TIMESTAMP", "TIME", "DATE"}) void terms_bucket_for_datetime_types_uses_long(ExprType dataType) { assertEquals( "{\n" diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java index cca51c8f4a..df754887cf 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java @@ -15,7 +15,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -116,15 +115,6 @@ void can_execute_expression_with_timestamp_field() { .shouldMatch(); } - @Test - void can_execute_expression_with_datetime_field() { - ExprTimestampValue ts = new ExprTimestampValue("2020-08-04 10:00:00"); - assertThat() - .docValues("birthday", ZonedDateTime.parse("2020-08-04T10:00:00Z")) - 
.filterBy(DSL.equal(ref("birthday", DATETIME), new LiteralExpression(ts))) - .shouldMatch(); - } - @Test void can_execute_expression_with_date_field() { ExprDateValue date = new ExprDateValue("2020-08-04"); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java index 1fc2d5ee29..90b982e017 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java @@ -13,7 +13,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -42,7 +41,6 @@ import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.common.utils.StringUtils; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprTupleValue; @@ -1787,7 +1785,7 @@ void cast_to_date_in_filter() { buildQuery( DSL.equal( ref("date_value", DATE), - DSL.castDate(literal(new ExprDatetimeValue("2021-11-08 17:00:00")))))); + DSL.castDate(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); } @Test @@ -1817,32 +1815,6 @@ void cast_to_time_in_filter() { DSL.castTime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); } - @Test - void cast_to_datetime_in_filter() { - 
String json = - "{\n" - + " \"term\" : {\n" - + " \"datetime_value\" : {\n" - + " \"value\" : \"2021-11-08 17:00:00\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; - - assertJsonEquals( - json, - buildQuery( - DSL.equal( - ref("datetime_value", DATETIME), - DSL.castDatetime(literal("2021-11-08 17:00:00"))))); - assertJsonEquals( - json, - buildQuery( - DSL.equal( - ref("datetime_value", DATETIME), - DSL.castDatetime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); - } - @Test void cast_to_timestamp_in_filter() { String json = From 7e3a718f1b7d9100fbac2ee8317fd35042b63b39 Mon Sep 17 00:00:00 2001 From: Yury-Fridlyand Date: Mon, 21 Aug 2023 10:25:12 -0700 Subject: [PATCH 6/6] Run IT tests with security plugin (#335) (#1986) * Run IT tests with security plugin (#335) * Add extra IT flow. Signed-off-by: Yury-Fridlyand * Remove unneeded files. Signed-off-by: Yury-Fridlyand * Typo fix. Signed-off-by: Yury-Fridlyand * Fix GHA matrix syntax. Signed-off-by: Yury-Fridlyand * Fix GHA matrix syntax. Signed-off-by: Yury-Fridlyand * Code clean up. Signed-off-by: Yury-Fridlyand * Optimize downloading. Signed-off-by: Yury-Fridlyand * Apply suggestions from code review Signed-off-by: Yury-Fridlyand Co-authored-by: Andrew Carbonetto * Update integ-test/build.gradle Signed-off-by: Yury-Fridlyand Co-authored-by: Andrew Carbonetto * Typo fix. Signed-off-by: Yury-Fridlyand * Rework implementation. Signed-off-by: Yury-Fridlyand * Address PR review. Signed-off-by: Yury-Fridlyand * Address PR feedback + some fixes. Signed-off-by: Yury-Fridlyand --------- Signed-off-by: Yury-Fridlyand Co-authored-by: Andrew Carbonetto * Minor fix. Signed-off-by: Yury-Fridlyand * Address PR feedback. Signed-off-by: Yury-Fridlyand * Typo fix. 
Signed-off-by: Yury-Fridlyand --------- Signed-off-by: Yury-Fridlyand Co-authored-by: Andrew Carbonetto --- .../workflows/integ-tests-with-security.yml | 43 +++++ integ-test/build.gradle | 163 +++++++++++++++++- .../sql/legacy/OpenSearchSQLRestTestCase.java | 85 +++++---- .../CrossClusterSearchIT.java | 52 ++++-- 4 files changed, 293 insertions(+), 50 deletions(-) create mode 100644 .github/workflows/integ-tests-with-security.yml rename integ-test/src/test/java/org/opensearch/sql/{ppl => security}/CrossClusterSearchIT.java (79%) diff --git a/.github/workflows/integ-tests-with-security.yml b/.github/workflows/integ-tests-with-security.yml new file mode 100644 index 0000000000..0d54b8cfef --- /dev/null +++ b/.github/workflows/integ-tests-with-security.yml @@ -0,0 +1,43 @@ +name: Security Plugin IT + +on: + pull_request: + push: + branches-ignore: + - 'dependabot/**' + paths: + - 'integ-test/**' + - '.github/workflows/integ-tests-with-security.yml' + +jobs: + security-it: + strategy: + fail-fast: false + matrix: + os: [ ubuntu-latest, windows-latest, macos-latest ] + java: [ 11, 17 ] + + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + + - name: Set up JDK ${{ matrix.java }} + uses: actions/setup-java@v3 + with: + distribution: 'temurin' + java-version: ${{ matrix.java }} + + - name: Build with Gradle + run: ./gradlew integTestWithSecurity + + - name: Upload test reports + if: ${{ always() }} + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: test-reports-${{ matrix.os }}-${{ matrix.java }} + path: | + integ-test/build/reports/** + integ-test/build/testclusters/*/logs/* + integ-test/build/testclusters/*/config/* diff --git a/integ-test/build.gradle b/integ-test/build.gradle index 6ee9cb425e..7cb0983670 100644 --- a/integ-test/build.gradle +++ b/integ-test/build.gradle @@ -24,7 +24,10 @@ import org.opensearch.gradle.test.RestIntegTestTask import org.opensearch.gradle.testclusters.StandaloneRestIntegTestTask +import 
org.opensearch.gradle.testclusters.OpenSearchCluster +import groovy.xml.XmlParser +import java.nio.file.Paths import java.util.concurrent.Callable import java.util.stream.Collectors @@ -62,6 +65,81 @@ ext { projectSubstitutions = [:] licenseFile = rootProject.file('LICENSE.TXT') noticeFile = rootProject.file('NOTICE') + + getSecurityPluginDownloadLink = { -> + var repo = "https://aws.oss.sonatype.org/content/repositories/snapshots/org/opensearch/plugin/" + + "opensearch-security/$opensearch_build/" + var metadataFile = Paths.get(projectDir.toString(), "build", "maven-metadata.xml").toAbsolutePath().toFile() + download.run { + src repo + "maven-metadata.xml" + dest metadataFile + } + def metadata = new XmlParser().parse(metadataFile) + def securitySnapshotVersion = metadata.versioning.snapshotVersions[0].snapshotVersion[0].value[0].text() + + return repo + "opensearch-security-${securitySnapshotVersion}.zip" + } + + File downloadedSecurityPlugin = null + + configureSecurityPlugin = { OpenSearchCluster cluster -> + + cluster.getNodes().forEach { node -> + var creds = node.getCredentials() + if (creds.isEmpty()) { + creds.add(Map.of('useradd', 'admin', '-p', 'admin')) + } else { + creds.get(0).putAll(Map.of('useradd', 'admin', '-p', 'admin')) + } + } + + var projectAbsPath = projectDir.getAbsolutePath() + + // add a check to avoid re-downloading multiple times during single test run + if (downloadedSecurityPlugin == null) { + downloadedSecurityPlugin = Paths.get(projectAbsPath, 'bin', 'opensearch-security-snapshot.zip').toFile() + download.run { + src getSecurityPluginDownloadLink() + dest downloadedSecurityPlugin + } + } + + // Config below including files are copied from security demo configuration + ['esnode.pem', 'esnode-key.pem', 'root-ca.pem'].forEach { file -> + File local = Paths.get(projectAbsPath, 'bin', file).toFile() + download.run { + src "https://raw.githubusercontent.com/opensearch-project/security/main/bwc-test/src/test/resources/security/" + file + 
dest local + overwrite false + } + cluster.extraConfigFile file, local + } + [ + // config copied from security plugin demo configuration + 'plugins.security.ssl.transport.pemcert_filepath' : 'esnode.pem', + 'plugins.security.ssl.transport.pemkey_filepath' : 'esnode-key.pem', + 'plugins.security.ssl.transport.pemtrustedcas_filepath' : 'root-ca.pem', + 'plugins.security.ssl.transport.enforce_hostname_verification' : 'false', + // https is disabled to simplify test debugging + 'plugins.security.ssl.http.enabled' : 'false', + 'plugins.security.ssl.http.pemcert_filepath' : 'esnode.pem', + 'plugins.security.ssl.http.pemkey_filepath' : 'esnode-key.pem', + 'plugins.security.ssl.http.pemtrustedcas_filepath' : 'root-ca.pem', + 'plugins.security.allow_unsafe_democertificates' : 'true', + + 'plugins.security.allow_default_init_securityindex' : 'true', + 'plugins.security.authcz.admin_dn' : 'CN=kirk,OU=client,O=client,L=test,C=de', + 'plugins.security.audit.type' : 'internal_opensearch', + 'plugins.security.enable_snapshot_restore_privilege' : 'true', + 'plugins.security.check_snapshot_restore_write_privileges' : 'true', + 'plugins.security.restapi.roles_enabled' : '["all_access", "security_rest_api_access"]', + 'plugins.security.system_indices.enabled' : 'true' + ].forEach { name, value -> + cluster.setting name, value + } + + cluster.plugin provider((Callable) (() -> (RegularFile) (() -> downloadedSecurityPlugin))) + } } tasks.withType(licenseHeaders.class) { @@ -108,6 +186,7 @@ dependencies { testImplementation group: 'com.h2database', name: 'h2', version: '2.2.220' testImplementation group: 'org.xerial', name: 'sqlite-jdbc', version: '3.41.2.2' testImplementation group: 'com.google.code.gson', name: 'gson', version: '2.8.9' + testCompileOnly 'org.apiguardian:apiguardian-api:1.1.2' // Needed for BWC tests zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${bwcVersion}-SNAPSHOT" @@ -128,21 +207,28 @@ compileTestJava { } testClusters.all { - 
testDistribution = 'archive' - // debug with command, ./gradlew opensearch-sql:run -DdebugJVM. --debug-jvm does not work with keystore. if (System.getProperty("debugJVM") != null) { jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005' } } -testClusters.integTest { - plugin ":opensearch-sql-plugin" - setting "plugins.query.datasources.encryption.masterkey", "1234567812345678" -} - testClusters { + integTest { + testDistribution = 'archive' + plugin ":opensearch-sql-plugin" + setting "plugins.query.datasources.encryption.masterkey", "1234567812345678" + } remoteCluster { + testDistribution = 'archive' + plugin ":opensearch-sql-plugin" + } + integTestWithSecurity { + testDistribution = 'archive' + plugin ":opensearch-sql-plugin" + } + remoteIntegTestWithSecurity { + testDistribution = 'archive' plugin ":opensearch-sql-plugin" } } @@ -223,6 +309,65 @@ task integJdbcTest(type: RestIntegTestTask) { } } +task integTestWithSecurity(type: RestIntegTestTask) { + useCluster testClusters.integTestWithSecurity + useCluster testClusters.remoteIntegTestWithSecurity + + systemProperty "cluster.names", + getClusters().stream().map(cluster -> cluster.getName()).collect(Collectors.joining(",")) + + getClusters().forEach { cluster -> + configureSecurityPlugin(cluster) + } + + useJUnitPlatform() + dependsOn ':opensearch-sql-plugin:bundlePlugin' + testLogging { + events "passed", "skipped", "failed" + } + afterTest { desc, result -> + logger.quiet "${desc.className}.${desc.name}: ${result.resultType} ${(result.getEndTime() - result.getStartTime())/1000}s" + } + + systemProperty 'tests.security.manager', 'false' + systemProperty 'project.root', project.projectDir.absolutePath + + // Set default query size limit + systemProperty 'defaultQuerySizeLimit', '10000' + + // Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for + // requests. 
The 'doFirst' delays reading the debug setting on the cluster till execution time. + doFirst { + systemProperty 'cluster.debug', getDebug() + getClusters().forEach { cluster -> + + String allTransportSocketURI = cluster.nodes.stream().flatMap { node -> + node.getAllTransportPortURI().stream() + }.collect(Collectors.joining(",")) + String allHttpSocketURI = cluster.nodes.stream().flatMap { node -> + node.getAllHttpSocketURI().stream() + }.collect(Collectors.joining(",")) + + systemProperty "tests.rest.${cluster.name}.http_hosts", "${-> allHttpSocketURI}" + systemProperty "tests.rest.${cluster.name}.transport_hosts", "${-> allTransportSocketURI}" + } + + systemProperty "https", "false" + systemProperty "user", "admin" + systemProperty "password", "admin" + } + + if (System.getProperty("test.debug") != null) { + jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005' + } + + // NOTE: this IT config discovers only junit5 (jupiter) tests. + // https://github.com/opensearch-project/sql/issues/1974 + filter { + includeTestsMatching 'org.opensearch.sql.security.CrossClusterSearchIT' + } +} + // Run PPL ITs and new, legacy and comparison SQL ITs with new SQL engine enabled integTest { useCluster testClusters.remoteCluster @@ -305,8 +450,8 @@ integTest { // Exclude JDBC related tests exclude 'org/opensearch/sql/jdbc/**' - // Exclude this IT until running IT with security plugin enabled is ready - exclude 'org/opensearch/sql/ppl/CrossClusterSearchIT.class' + // Exclude this IT, because they executed in another task (:integTestWithSecurity) + exclude 'org/opensearch/sql/security/**' } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java index 385c9bc6ba..d73e3468d4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java @@ -5,8 +5,6 @@ package org.opensearch.sql.legacy; -import static java.util.Collections.unmodifiableList; - import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -49,8 +47,22 @@ public abstract class OpenSearchSQLRestTestCase extends OpenSearchRestTestCase { private static final Logger LOG = LogManager.getLogger(); - public static final String REMOTE_CLUSTER = "remoteCluster"; public static final String MATCH_ALL_REMOTE_CLUSTER = "*"; + // Requires to insert cluster name and cluster transport address (host:port) + public static final String REMOTE_CLUSTER_SETTING = + "{" + + "\"persistent\": {" + + " \"cluster\": {" + + " \"remote\": {" + + " \"%s\": {" + + " \"seeds\": [" + + " \"%s\"" + + " ]" + + " }" + + " }" + + " }" + + "}" + + "}"; private static RestClient remoteClient; @@ -106,27 +118,24 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE } // Modified from initClient in OpenSearchRestTestCase - public void initRemoteClient() throws IOException { - if (remoteClient == null) { - assert remoteAdminClient == null; - String cluster = getTestRestCluster(REMOTE_CLUSTER); - String[] stringUrls = cluster.split(","); - List hosts = new ArrayList<>(stringUrls.length); - for (String stringUrl : stringUrls) { - int portSeparator = stringUrl.lastIndexOf(':'); - if (portSeparator < 0) { - throw new IllegalArgumentException("Illegal cluster url [" + stringUrl + "]"); - } - String host = stringUrl.substring(0, portSeparator); - int port = Integer.valueOf(stringUrl.substring(portSeparator + 1)); - hosts.add(buildHttpHost(host, port)); + public void initRemoteClient(String clusterName) throws IOException { + remoteClient = remoteAdminClient = initClient(clusterName); + } + + /** Configure http client for the given cluster. 
*/ + public RestClient initClient(String clusterName) throws IOException { + String[] stringUrls = getTestRestCluster(clusterName).split(","); + List hosts = new ArrayList<>(stringUrls.length); + for (String stringUrl : stringUrls) { + int portSeparator = stringUrl.lastIndexOf(':'); + if (portSeparator < 0) { + throw new IllegalArgumentException("Illegal cluster url [" + stringUrl + "]"); } - final List clusterHosts = unmodifiableList(hosts); - remoteClient = buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0])); - remoteAdminClient = buildClient(restAdminSettings(), clusterHosts.toArray(new HttpHost[0])); + String host = stringUrl.substring(0, portSeparator); + int port = Integer.parseInt(stringUrl.substring(portSeparator + 1)); + hosts.add(buildHttpHost(host, port)); } - assert remoteClient != null; - assert remoteAdminClient != null; + return buildClient(restClientSettings(), hosts.toArray(new HttpHost[0])); } /** Get a comma delimited list of [host:port] to which to send REST requests. */ @@ -200,6 +209,27 @@ protected static void wipeAllOpenSearchIndices(RestClient client) throws IOExcep } } + /** + * Configure authentication and pass builder to superclass to configure other stuff.
+ * By default, auth is configure when https is set only. + */ + protected static void configureClient(RestClientBuilder builder, Settings settings) + throws IOException { + String userName = System.getProperty("user"); + String password = System.getProperty("password"); + if (userName != null && password != null) { + builder.setHttpClientConfigCallback( + httpClientBuilder -> { + BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials( + new AuthScope(null, -1), + new UsernamePasswordCredentials(userName, password.toCharArray())); + return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + }); + } + OpenSearchRestTestCase.configureClient(builder, settings); + } + protected static void configureHttpsClient( RestClientBuilder builder, Settings settings, HttpHost httpHost) throws IOException { Map headers = ThreadContext.buildDefaultHeaders(settings); @@ -259,16 +289,13 @@ protected static void configureHttpsClient( * Initialize rest client to remote cluster, and create a connection to it from the coordinating * cluster. 
*/ - public void configureMultiClusters() throws IOException { - initRemoteClient(); + public void configureMultiClusters(String remote) throws IOException { + initRemoteClient(remote); Request connectionRequest = new Request("PUT", "_cluster/settings"); String connectionSetting = - "{\"persistent\": {\"cluster\": {\"remote\": {\"" - + REMOTE_CLUSTER - + "\": {\"seeds\": [\"" - + getTestTransportCluster(REMOTE_CLUSTER).split(",")[0] - + "\"]}}}}}"; + String.format( + REMOTE_CLUSTER_SETTING, remote, getTestTransportCluster(remote).split(",")[0]); connectionRequest.setJsonEntity(connectionSetting); adminClient().performRequest(connectionRequest); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java b/integ-test/src/test/java/org/opensearch/sql/security/CrossClusterSearchIT.java similarity index 79% rename from integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java rename to integ-test/src/test/java/org/opensearch/sql/security/CrossClusterSearchIT.java index 19e3debdf0..086f32cba7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/security/CrossClusterSearchIT.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package org.opensearch.sql.ppl; +package org.opensearch.sql.security; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -14,15 +14,30 @@ import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import java.io.IOException; +import lombok.SneakyThrows; import org.json.JSONObject; -import org.junit.Rule; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.rules.ExpectedException; import org.opensearch.client.ResponseException; +import org.opensearch.sql.ppl.PPLIntegTestCase; +/** Cross Cluster Search tests to be executed with security plugin. 
*/ public class CrossClusterSearchIT extends PPLIntegTestCase { - @Rule public ExpectedException exceptionRule = ExpectedException.none(); + static { + // find a remote cluster + String[] clusterNames = System.getProperty("cluster.names").split(","); + var remote = "remoteCluster"; + for (var cluster : clusterNames) { + if (cluster.startsWith("remote")) { + remote = cluster; + break; + } + } + REMOTE_CLUSTER = remote; + } + + public static final String REMOTE_CLUSTER; private static final String TEST_INDEX_BANK_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_BANK; private static final String TEST_INDEX_DOG_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; @@ -30,14 +45,25 @@ public class CrossClusterSearchIT extends PPLIntegTestCase { MATCH_ALL_REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; private static final String TEST_INDEX_ACCOUNT_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_ACCOUNT; + private static boolean initialized = false; + + @SneakyThrows + @BeforeEach + public void initialize() { + if (!initialized) { + setUpIndices(); + initialized = true; + } + } + @Override - public void init() throws IOException { - configureMultiClusters(); + protected void init() throws Exception { + configureMultiClusters(REMOTE_CLUSTER); loadIndex(Index.BANK); loadIndex(Index.BANK, remoteClient()); loadIndex(Index.DOG); loadIndex(Index.DOG, remoteClient()); - loadIndex(Index.ACCOUNT, remoteClient()); + loadIndex(Index.ACCOUNT); } @Test @@ -55,11 +81,13 @@ public void testMatchAllCrossClusterSearchAllFields() throws IOException { @Test public void testCrossClusterSearchWithoutLocalFieldMappingShouldFail() throws IOException { - exceptionRule.expect(ResponseException.class); - exceptionRule.expectMessage("400 Bad Request"); - exceptionRule.expectMessage("IndexNotFoundException"); - - executeQuery(String.format("search source=%s", TEST_INDEX_ACCOUNT_REMOTE)); + var exception = + assertThrows( + ResponseException.class, + () -> executeQuery(String.format("search source=%s", 
TEST_INDEX_ACCOUNT_REMOTE))); + assertTrue( + exception.getMessage().contains("IndexNotFoundException") + && exception.getMessage().contains("400 Bad Request")); } @Test