diff --git a/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java b/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java index b53d17b38f..699d0ec76a 100644 --- a/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java +++ b/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java @@ -45,12 +45,13 @@ /** * Default implementor for implementing logical to physical translation. "Default" here means all - * logical operator will be translated to correspondent physical operator to pipeline operations in - * post-processing style in memory. Different storage can override methods here to optimize default - * pipelining operator, for example a storage has the flexibility to override visitFilter and - * visitRelation to push down filtering operation and return a single physical index scan operator. + * logical operator will be translated to correspondent physical operator to pipeline operations + * in post-processing style in memory. + * Different storage can override methods here to optimize default pipelining operator, for example + * a storage has the flexibility to override visitFilter and visitRelation to push down filtering + * operation and return a single physical index scan operator. * - * @param context type + * @param context type */ public class DefaultImplementor extends LogicalPlanNodeVisitor { @@ -61,7 +62,8 @@ public PhysicalPlan visitRareTopN(LogicalRareTopN node, C context) { node.getCommandType(), node.getNoOfResults(), node.getFieldList(), - node.getGroupByList()); + node.getGroupByList() + ); } @Override @@ -76,14 +78,16 @@ public PhysicalPlan visitDedupe(LogicalDedupe node, C context) { @Override public PhysicalPlan visitProject(LogicalProject node, C context) { - return new ProjectOperator( - visitChild(node, context), node.getProjectList(), node.getNamedParseExpressions()); + return new ProjectOperator(visitChild(node, context), node.getProjectList(), + node.getNamedParseExpressions()); } @Override public PhysicalPlan visitWindow(LogicalWindow node, C context) { return new WindowOperator( - visitChild(node, context), node.getWindowFunction(), node.getWindowDefinition()); + visitChild(node, context), + node.getWindowFunction(), + node.getWindowDefinition()); } @Override @@ -144,9 +148,8 @@ public PhysicalPlan visitTableWriteBuilder(TableWriteBuilder plan, C context) { @Override public PhysicalPlan visitRelation(LogicalRelation node, C context) { - throw new UnsupportedOperationException( - "Storage engine is responsible for " - + "implementing and optimizing logical plan with relation involved"); + throw new UnsupportedOperationException("Storage engine is responsible for " + + "implementing and optimizing logical plan with relation involved"); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/PlanContext.java b/core/src/main/java/org/opensearch/sql/planner/PlanContext.java index 38f2bde244..3d43c02d61 100644 --- a/core/src/main/java/org/opensearch/sql/planner/PlanContext.java +++ b/core/src/main/java/org/opensearch/sql/planner/PlanContext.java @@ -9,10 +9,13 @@ import lombok.Getter; import org.opensearch.sql.storage.split.Split; -/** Plan context hold planning related information. */ +/** + * Plan context hold planning related information. 
+ */ public class PlanContext { - @Getter private final Optional split; + @Getter + private final Optional split; public PlanContext(Split split) { this.split = Optional.of(split); diff --git a/core/src/main/java/org/opensearch/sql/planner/PlanNode.java b/core/src/main/java/org/opensearch/sql/planner/PlanNode.java index a79997cd7f..8cd6e088e5 100644 --- a/core/src/main/java/org/opensearch/sql/planner/PlanNode.java +++ b/core/src/main/java/org/opensearch/sql/planner/PlanNode.java @@ -3,11 +3,14 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner; import java.util.List; -/** The definition of Plan Node. */ +/** + * The definition of Plan Node. + */ public interface PlanNode { /** diff --git a/core/src/main/java/org/opensearch/sql/planner/Planner.java b/core/src/main/java/org/opensearch/sql/planner/Planner.java index 0d7725ca56..8333425091 100644 --- a/core/src/main/java/org/opensearch/sql/planner/Planner.java +++ b/core/src/main/java/org/opensearch/sql/planner/Planner.java @@ -3,8 +3,10 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner; + import java.util.List; import lombok.RequiredArgsConstructor; import org.opensearch.sql.planner.logical.LogicalPlan; @@ -14,16 +16,18 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.storage.Table; -/** Planner that plans and chooses the optimal physical plan. */ +/** + * Planner that plans and chooses the optimal physical plan. + */ @RequiredArgsConstructor public class Planner { private final LogicalPlanOptimizer logicalOptimizer; /** - * Generate optimal physical plan for logical plan. If no table involved, translate logical plan - * to physical by default implementor. TODO: for now just delegate entire logical plan to storage - * engine. + * Generate optimal physical plan for logical plan. If no table involved, + * translate logical plan to physical by default implementor. + * TODO: for now just delegate entire logical plan to storage engine. 
* * @param plan logical plan * @return optimal physical plan @@ -33,28 +37,28 @@ public PhysicalPlan plan(LogicalPlan plan) { if (table == null) { return plan.accept(new DefaultImplementor<>(), null); } - return table.implement(table.optimize(optimize(plan))); + return table.implement( + table.optimize(optimize(plan))); } private Table findTable(LogicalPlan plan) { - return plan.accept( - new LogicalPlanNodeVisitor() { - - @Override - public Table visitNode(LogicalPlan node, Object context) { - List children = node.getChild(); - if (children.isEmpty()) { - return null; - } - return children.get(0).accept(this, context); - } - - @Override - public Table visitRelation(LogicalRelation node, Object context) { - return node.getTable(); - } - }, - null); + return plan.accept(new LogicalPlanNodeVisitor() { + + @Override + public Table visitNode(LogicalPlan node, Object context) { + List children = node.getChild(); + if (children.isEmpty()) { + return null; + } + return children.get(0).accept(this, context); + } + + @Override + public Table visitRelation(LogicalRelation node, Object context) { + return node.getTable(); + } + + }, null); } private LogicalPlan optimize(LogicalPlan plan) { diff --git a/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java b/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java index 1503946abc..ab195da5bf 100644 --- a/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java +++ b/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java @@ -10,37 +10,36 @@ /** * All subtypes of PhysicalPlan which needs to be serialized (in cursor, for pagination feature) * should follow one of the following options. - * *
- * <ul>
- *   <li>Both:
- *       <ul>
- *         <li>Override both methods from {@link Externalizable}.
- *         <li>Define a public no-arg constructor.
- *       </ul>
- *   <li>Overwrite {@link #getPlanForSerialization} to return another instance of {@link
- *       SerializablePlan}.
+ * <ul>
+ *  <li>Both:
+ *   <ul>
+ *    <li>Override both methods from {@link Externalizable}.</li>
+ *    <li>Define a public no-arg constructor.</li>
+ *   </ul>
+ *  </li>
+ *  <li>
+ *   Overwrite {@link #getPlanForSerialization} to return
+ *   another instance of {@link SerializablePlan}.
+ *  </li>
  * </ul>
*/ public interface SerializablePlan extends Externalizable { /** - * Override to return child or delegated plan, so parent plan should skip this one for - * serialization, but it should try to serialize grandchild plan. Imagine plan structure like this - * + * Override to return child or delegated plan, so parent plan should skip this one + * for serialization, but it should try to serialize grandchild plan. + * Imagine plan structure like this * <pre>
    *    A         -> this
    *    `- B      -> child
    *      `- C    -> this
    * </pre>
+ * In that case only plans A and C should be attempted to serialize. + * It is needed to skip a `ResourceMonitorPlan` instance only, actually. * - * In that case only plans A and C should be attempted to serialize. It is needed to skip a - * `ResourceMonitorPlan` instance only, actually. - *
-   * <pre>{@code
-   * * A.writeObject(B.getPlanForSerialization());
-   *
-   * }</pre>
-   *
+   * <pre>{@code
+   *    * A.writeObject(B.getPlanForSerialization());
+   *  }</pre>
* @return Next plan for serialization. */ default SerializablePlan getPlanForSerialization() { diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java index 25dbd14f1a..c8c04b1817 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java @@ -18,7 +18,6 @@ public class LogicalAD extends LogicalPlan { /** * Constructor of LogicalAD. - * * @param child child logical plan * @param arguments arguments of the algorithm */ diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java index ecbcece623..ebca01cdf8 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -13,18 +14,26 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.aggregation.NamedAggregator; -/** Logical Aggregation. */ +/** + * Logical Aggregation. + */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalAggregation extends LogicalPlan { - @Getter private final List aggregatorList; + @Getter + private final List aggregatorList; - @Getter private final List groupByList; + @Getter + private final List groupByList; - /** Constructor of LogicalAggregation. */ + /** + * Constructor of LogicalAggregation. + */ public LogicalAggregation( - LogicalPlan child, List aggregatorList, List groupByList) { + LogicalPlan child, + List aggregatorList, + List groupByList) { super(Collections.singletonList(child)); this.aggregatorList = aggregatorList; this.groupByList = groupByList; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java index d1b98df8ed..e5c30a4f4f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java @@ -10,8 +10,8 @@ import lombok.ToString; /** - * A logical plan node which wraps {@link org.opensearch.sql.planner.LogicalCursor} and represent a - * cursor close operation. + * A logical plan node which wraps {@link org.opensearch.sql.planner.LogicalCursor} + * and represent a cursor close operation. */ @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java index 92734440f7..020352287d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Arrays; @@ -12,7 +13,9 @@ import lombok.ToString; import org.opensearch.sql.expression.Expression; -/** Logical Dedupe Plan. */ +/** + * Logical Dedupe Plan. 
+ */ @Getter @ToString @EqualsAndHashCode(callSuper = true) @@ -23,12 +26,12 @@ public class LogicalDedupe extends LogicalPlan { private final Boolean keepEmpty; private final Boolean consecutive; - /** Constructor of LogicalDedupe. */ + /** + * Constructor of LogicalDedupe. + */ public LogicalDedupe( LogicalPlan child, - List dedupeList, - Integer allowedDuplication, - Boolean keepEmpty, + List dedupeList, Integer allowedDuplication, Boolean keepEmpty, Boolean consecutive) { super(Arrays.asList(child)); this.dedupeList = dedupeList; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java index e7b8f353bc..8ec0b84dad 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -23,10 +24,15 @@ @EqualsAndHashCode(callSuper = true) public class LogicalEval extends LogicalPlan { - @Getter private final List> expressions; + @Getter + private final List> expressions; - /** Constructor of LogicalEval. */ - public LogicalEval(LogicalPlan child, List> expressions) { + /** + * Constructor of LogicalEval. + */ + public LogicalEval( + LogicalPlan child, + List> expressions) { super(Collections.singletonList(child)); this.expressions = expressions; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java index ca16b41597..e4a0482aac 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java @@ -9,17 +9,25 @@ import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; +import org.opensearch.sql.planner.logical.LogicalPlan; +import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; import org.opensearch.sql.storage.StorageEngine; -/** A plan node which represents operation of fetching a next page from the cursor. */ +/** + * A plan node which represents operation of fetching a next page from the cursor. + */ @EqualsAndHashCode(callSuper = false) @ToString public class LogicalFetchCursor extends LogicalPlan { - @Getter private final String cursor; + @Getter + private final String cursor; - @Getter private final StorageEngine engine; + @Getter + private final StorageEngine engine; - /** LogicalCursor constructor. Does not have child plans. */ + /** + * LogicalCursor constructor. Does not have child plans. + */ public LogicalFetchCursor(String cursor, StorageEngine engine) { super(List.of()); this.cursor = cursor; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java index 49280e8709..78887ad448 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -11,14 +12,19 @@ import lombok.ToString; import org.opensearch.sql.expression.Expression; -/** Logical Filter represent the filter relation. */ +/** + * Logical Filter represent the filter relation. 
+ */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalFilter extends LogicalPlan { - @Getter private final Expression condition; + @Getter + private final Expression condition; - /** Constructor of LogicalFilter. */ + /** + * Constructor of LogicalFilter. + */ public LogicalFilter(LogicalPlan child, Expression condition) { super(Collections.singletonList(child)); this.condition = condition; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java index 41fcd48f81..c1e873a00d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java @@ -20,9 +20,11 @@ public class LogicalHighlight extends LogicalPlan { private final Expression highlightField; private final Map arguments; - /** Constructor of LogicalHighlight. */ - public LogicalHighlight( - LogicalPlan childPlan, Expression highlightField, Map arguments) { + /** + * Constructor of LogicalHighlight. + */ + public LogicalHighlight(LogicalPlan childPlan, Expression highlightField, + Map arguments) { super(Collections.singletonList(childPlan)); this.highlightField = highlightField; this.arguments = arguments; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java index bec77d9b6f..e6253cb2cc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -17,7 +18,9 @@ public class LogicalLimit extends LogicalPlan { private final Integer limit; private final Integer offset; - /** Constructor of LogicalLimit. */ + /** + * Constructor of LogicalLimit. + */ public LogicalLimit(LogicalPlan input, Integer limit, Integer offset) { super(Collections.singletonList(input)); this.limit = limit; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java index 780e0bba94..c54ee92e08 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java @@ -7,16 +7,17 @@ import lombok.ToString; import org.opensearch.sql.ast.expression.Literal; -/** ML logical plan. */ +/** + * ML logical plan. + */ @Getter @ToString @EqualsAndHashCode(callSuper = true) public class LogicalML extends LogicalPlan { - private final Map arguments; + private final Map arguments; /** * Constructor of LogicalML. - * * @param child child logical plan * @param arguments arguments of the algorithm */ diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java index cfc313a68d..22771b42de 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java @@ -7,23 +7,25 @@ import lombok.ToString; import org.opensearch.sql.ast.expression.Literal; -/** ml-commons logical plan. */ +/** + * ml-commons logical plan. 
+ */ @Getter @ToString @EqualsAndHashCode(callSuper = true) public class LogicalMLCommons extends LogicalPlan { private final String algorithm; - private final Map arguments; + private final Map arguments; /** * Constructor of LogicalMLCommons. - * * @param child child logical plan * @param algorithm algorithm name * @param arguments arguments of the algorithm */ - public LogicalMLCommons(LogicalPlan child, String algorithm, Map arguments) { + public LogicalMLCommons(LogicalPlan child, String algorithm, + Map arguments) { super(Collections.singletonList(child)); this.algorithm = algorithm; this.arguments = arguments; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java index e791a1fad1..3e0e167cf3 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java @@ -14,7 +14,9 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.ReferenceExpression; -/** Logical Nested plan. */ +/** + * Logical Nested plan. + */ @EqualsAndHashCode(callSuper = true) @Getter @ToString @@ -22,11 +24,15 @@ public class LogicalNested extends LogicalPlan { private List> fields; private final List projectList; - /** Constructor of LogicalNested. */ + /** + * Constructor of LogicalNested. + * + */ public LogicalNested( LogicalPlan childPlan, List> fields, - List projectList) { + List projectList + ) { super(Collections.singletonList(childPlan)); this.fields = fields; this.projectList = projectList; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java index bd9f20e055..372f9dcf0b 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java @@ -10,11 +10,14 @@ import lombok.Getter; import lombok.ToString; -/** LogicalPaginate represents pagination operation for underlying plan. */ +/** + * LogicalPaginate represents pagination operation for underlying plan. + */ @ToString @EqualsAndHashCode(callSuper = false) public class LogicalPaginate extends LogicalPlan { - @Getter private final int pageSize; + @Getter + private final int pageSize; public LogicalPaginate(int pageSize, List childPlans) { super(childPlans); diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java index 2bc1a8756f..ad4a0b3794 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java @@ -3,13 +3,16 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.List; import lombok.EqualsAndHashCode; import org.opensearch.sql.planner.PlanNode; -/** The abstract base class for all the Logical Plan node. */ +/** + * The abstract base class for all the Logical Plan node. + */ @EqualsAndHashCode(callSuper = false) public abstract class LogicalPlan implements PlanNode { @@ -24,8 +27,8 @@ public LogicalPlan(List childPlans) { * * @param visitor visitor. * @param context visitor context. - * @param returned object type. - * @param context type. + * @param returned object type. + * @param context type. * @return returned object. 
*/ public abstract R accept(LogicalPlanNodeVisitor visitor, C context); @@ -35,6 +38,7 @@ public LogicalPlan replaceChildPlans(List childPlans) { return this; } + @Override public List getChild() { return childPlans; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java index 2a886ba0ca..c0e253ca50 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import com.google.common.collect.ImmutableList; @@ -24,7 +25,9 @@ import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; -/** Logical Plan DSL. */ +/** + * Logical Plan DSL. + */ @UtilityClass public class LogicalPlanDSL { @@ -54,7 +57,7 @@ public static LogicalPlan rename( return new LogicalRename(input, renameMap); } - public static LogicalPlan paginate(LogicalPlan input, int fetchSize) { + public static LogicalPlan paginate(LogicalPlan input, int fetchSize) { return new LogicalPaginate(fetchSize, List.of(input)); } @@ -62,23 +65,23 @@ public static LogicalPlan project(LogicalPlan input, NamedExpression... fields) return new LogicalProject(input, Arrays.asList(fields), ImmutableList.of()); } - public static LogicalPlan project( - LogicalPlan input, - List fields, - List namedParseExpressions) { + public static LogicalPlan project(LogicalPlan input, List fields, + List namedParseExpressions) { return new LogicalProject(input, fields, namedParseExpressions); } - public LogicalPlan window( - LogicalPlan input, NamedExpression windowFunction, WindowDefinition windowDefinition) { + public LogicalPlan window(LogicalPlan input, + NamedExpression windowFunction, + WindowDefinition windowDefinition) { return new LogicalWindow(input, windowFunction, windowDefinition); } - public LogicalPlan highlight( - LogicalPlan input, Expression field, Map arguments) { + public LogicalPlan highlight(LogicalPlan input, Expression field, + Map arguments) { return new LogicalHighlight(input, field, arguments); } + public static LogicalPlan nested( LogicalPlan input, List> nestedArgs, @@ -113,20 +116,13 @@ public static LogicalPlan dedupe( input, Arrays.asList(fields), allowedDuplication, keepEmpty, consecutive); } - public static LogicalPlan rareTopN( - LogicalPlan input, - CommandType commandType, - List groupByList, - Expression... fields) { + public static LogicalPlan rareTopN(LogicalPlan input, CommandType commandType, + List groupByList, Expression... fields) { return rareTopN(input, commandType, 10, groupByList, fields); } - public static LogicalPlan rareTopN( - LogicalPlan input, - CommandType commandType, - int noOfResults, - List groupByList, - Expression... fields) { + public static LogicalPlan rareTopN(LogicalPlan input, CommandType commandType, int noOfResults, + List groupByList, Expression... fields) { return new LogicalRareTopN(input, commandType, noOfResults, Arrays.asList(fields), groupByList); } @@ -138,4 +134,5 @@ public LogicalPlan values(List... 
values) { public static LogicalPlan limit(LogicalPlan input, Integer limit, Integer offset) { return new LogicalLimit(input, limit, offset); } + } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java index 156db35306..dbe21d38e0 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import org.opensearch.sql.storage.read.TableScanBuilder; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java index 5978620480..427ccffc62 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -12,15 +13,21 @@ import lombok.ToString; import org.opensearch.sql.expression.NamedExpression; -/** Project field specified by the {@link LogicalProject#projectList}. */ +/** + * Project field specified by the {@link LogicalProject#projectList}. + */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalProject extends LogicalPlan { - @Getter private final List projectList; - @Getter private final List namedParseExpressions; + @Getter + private final List projectList; + @Getter + private final List namedParseExpressions; - /** Constructor of LogicalProject. */ + /** + * Constructor of LogicalProject. + */ public LogicalProject( LogicalPlan child, List projectList, diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java index 2c387eca9c..4744bc590f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -13,7 +14,9 @@ import org.opensearch.sql.ast.tree.RareTopN.CommandType; import org.opensearch.sql.expression.Expression; -/** Logical Rare and TopN Plan. */ +/** + * Logical Rare and TopN Plan. + */ @Getter @ToString @EqualsAndHashCode(callSuper = true) @@ -24,11 +27,12 @@ public class LogicalRareTopN extends LogicalPlan { private final List fieldList; private final List groupByList; - /** Constructor of LogicalRareTopN. */ + /** + * Constructor of LogicalRareTopN. 
+ */ public LogicalRareTopN( LogicalPlan child, - CommandType commandType, - Integer noOfResults, + CommandType commandType, Integer noOfResults, List fieldList, List groupByList) { super(Collections.singletonList(child)); diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java index d50e286e1d..a49c3d5cbe 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import com.google.common.collect.ImmutableList; @@ -11,16 +12,22 @@ import lombok.ToString; import org.opensearch.sql.storage.Table; -/** Logical Relation represent the data source. */ +/** + * Logical Relation represent the data source. + */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalRelation extends LogicalPlan { - @Getter private final String relationName; + @Getter + private final String relationName; - @Getter private final Table table; + @Getter + private final Table table; - /** Constructor of LogicalRelation. */ + /** + * Constructor of LogicalRelation. + */ public LogicalRelation(String relationName, Table table) { super(ImmutableList.of()); this.relationName = relationName; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java index c1aeda22c7..cda7282c40 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -12,15 +13,22 @@ import lombok.ToString; import org.opensearch.sql.expression.ReferenceExpression; -/** Remove field specified by the {@link LogicalRemove#removeList}. */ +/** + * Remove field specified by the {@link LogicalRemove#removeList}. + */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalRemove extends LogicalPlan { - @Getter private final Set removeList; + @Getter + private final Set removeList; - /** Constructor of LogicalRemove. */ - public LogicalRemove(LogicalPlan child, Set removeList) { + /** + * Constructor of LogicalRemove. + */ + public LogicalRemove( + LogicalPlan child, + Set removeList) { super(Collections.singletonList(child)); this.removeList = removeList; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java index 25ee645932..007a0a6fca 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -12,15 +13,23 @@ import lombok.ToString; import org.opensearch.sql.expression.ReferenceExpression; -/** Rename Operator. renameList is list of mapping of source and target. */ +/** + * Rename Operator. + * renameList is list of mapping of source and target. 
+ */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalRename extends LogicalPlan { - @Getter private final Map renameMap; + @Getter + private final Map renameMap; - /** Constructor of LogicalRename. */ - public LogicalRename(LogicalPlan child, Map renameMap) { + /** + * Constructor of LogicalRename. + */ + public LogicalRename( + LogicalPlan child, + Map renameMap) { super(Collections.singletonList(child)); this.renameMap = renameMap; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java index 569ca7e309..947411518f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -14,7 +15,9 @@ import org.opensearch.sql.ast.tree.Sort.SortOption; import org.opensearch.sql.expression.Expression; -/** Sort Plan. */ +/** + * Sort Plan. + */ @Getter @ToString @EqualsAndHashCode(callSuper = true) @@ -22,8 +25,12 @@ public class LogicalSort extends LogicalPlan { private final List> sortList; - /** Constructor of LogicalSort. */ - public LogicalSort(LogicalPlan child, List> sortList) { + /** + * Constructor of LogicalSort. + */ + public LogicalSort( + LogicalPlan child, + List> sortList) { super(Collections.singletonList(child)); this.sortList = sortList; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java index e586c391ff..29d2db54b2 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import com.google.common.collect.ImmutableList; @@ -13,14 +14,16 @@ import org.opensearch.sql.expression.LiteralExpression; /** - * Logical operator which is a sequence of literal rows (like a relation). Basically, Values - * operator is used to create rows of constant literals "out of nothing" which is corresponding with - * VALUES clause in SQL. Mostly all rows must have the same number of literals and each column - * should have same type or can be converted implicitly. In particular, typical use cases include: - * 1. Project without relation involved. 2. Defining query or insertion without a relation. Take the - * following logical plan for example: - * - *
+ * Logical operator which is a sequence of literal rows (like a relation).
+ * Basically, Values operator is used to create rows of constant literals
+ * "out of nothing" which is corresponding with VALUES clause in SQL.
+ * Mostly all rows must have the same number of literals and each column should
+ * have same type or can be converted implicitly.
+ * In particular, typical use cases include:
+ *  1. Project without relation involved.
+ *  2. Defining query or insertion without a relation.
+ * Take the following logical plan for example:
+ *  <pre>
  *  LogicalProject(expr=[log(2),true,1+2])
  *   |_ LogicalValues([[]])  #an empty row so that Project can evaluate its expressions in next()
  *  </pre>
@@ -32,8 +35,11 @@ public class LogicalValues extends LogicalPlan { private final List> values; - /** Constructor of LogicalValues. */ - public LogicalValues(List> values) { + /** + * Constructor of LogicalValues. + */ + public LogicalValues( + List> values) { super(ImmutableList.of()); this.values = values; } @@ -42,4 +48,5 @@ public LogicalValues(List> values) { public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitValues(this, context); } + } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java index 00c89410a7..022b284674 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -24,9 +25,13 @@ public class LogicalWindow extends LogicalPlan { private final NamedExpression windowFunction; private final WindowDefinition windowDefinition; - /** Constructor of logical window. */ + /** + * Constructor of logical window. + */ public LogicalWindow( - LogicalPlan child, NamedExpression windowFunction, WindowDefinition windowDefinition) { + LogicalPlan child, + NamedExpression windowFunction, + WindowDefinition windowDefinition) { super(Collections.singletonList(child)); this.windowFunction = windowFunction; this.windowDefinition = windowDefinition; @@ -36,4 +41,5 @@ public LogicalWindow( public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitWindow(this, context); } + } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java index a253739a68..496e6009e3 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java @@ -12,7 +12,9 @@ import lombok.ToString; import org.opensearch.sql.storage.Table; -/** Logical operator for insert statement. */ +/** + * Logical operator for insert statement. + */ @EqualsAndHashCode(callSuper = true) @Getter @ToString @@ -24,7 +26,9 @@ public class LogicalWrite extends LogicalPlan { /** Optional column name list specified in insert statement. */ private final List columns; - /** Construct a logical write with given child node, table and column name list. */ + /** + * Construct a logical write with given child node, table and column name list. + */ public LogicalWrite(LogicalPlan child, Table table, List columns) { super(Collections.singletonList(child)); this.table = table; diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java index 7850b1259f..be1227c1da 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.optimizer; import static com.facebook.presto.matching.DefaultMatcher.DEFAULT_MATCHER; @@ -19,48 +20,56 @@ import org.opensearch.sql.planner.optimizer.rule.write.CreateTableWriteBuilder; /** - * {@link LogicalPlan} Optimizer. The Optimizer will run in the TopDown manner. 1> Optimize the - * current node with all the rules. 
2> Optimize the all the child nodes with all the rules. 3) In - * case the child node could change, Optimize the current node again. + * {@link LogicalPlan} Optimizer. + * The Optimizer will run in the TopDown manner. + * 1> Optimize the current node with all the rules. + * 2> Optimize the all the child nodes with all the rules. + * 3) In case the child node could change, Optimize the current node again. */ public class LogicalPlanOptimizer { private final List> rules; - /** Create {@link LogicalPlanOptimizer} with customized rules. */ + /** + * Create {@link LogicalPlanOptimizer} with customized rules. + */ public LogicalPlanOptimizer(List> rules) { this.rules = rules; } - /** Create {@link LogicalPlanOptimizer} with pre-defined rules. */ + /** + * Create {@link LogicalPlanOptimizer} with pre-defined rules. + */ public static LogicalPlanOptimizer create() { - return new LogicalPlanOptimizer( - Arrays.asList( - /* - * Phase 1: Transformations that rely on relational algebra equivalence - */ - new MergeFilterAndFilter(), - new PushFilterUnderSort(), - /* - * Phase 2: Transformations that rely on data source push down capability - */ - new CreateTableScanBuilder(), - TableScanPushDown.PUSH_DOWN_FILTER, - TableScanPushDown.PUSH_DOWN_AGGREGATION, - TableScanPushDown.PUSH_DOWN_SORT, - TableScanPushDown.PUSH_DOWN_LIMIT, - new PushDownPageSize(), - TableScanPushDown.PUSH_DOWN_HIGHLIGHT, - TableScanPushDown.PUSH_DOWN_NESTED, - TableScanPushDown.PUSH_DOWN_PROJECT, - new CreateTableWriteBuilder())); + return new LogicalPlanOptimizer(Arrays.asList( + /* + * Phase 1: Transformations that rely on relational algebra equivalence + */ + new MergeFilterAndFilter(), + new PushFilterUnderSort(), + /* + * Phase 2: Transformations that rely on data source push down capability + */ + new CreateTableScanBuilder(), + TableScanPushDown.PUSH_DOWN_FILTER, + TableScanPushDown.PUSH_DOWN_AGGREGATION, + TableScanPushDown.PUSH_DOWN_SORT, + TableScanPushDown.PUSH_DOWN_LIMIT, + new PushDownPageSize(), + TableScanPushDown.PUSH_DOWN_HIGHLIGHT, + TableScanPushDown.PUSH_DOWN_NESTED, + TableScanPushDown.PUSH_DOWN_PROJECT, + new CreateTableWriteBuilder())); } - /** Optimize {@link LogicalPlan}. */ + /** + * Optimize {@link LogicalPlan}. + */ public LogicalPlan optimize(LogicalPlan plan) { LogicalPlan optimized = internalOptimize(plan); optimized.replaceChildPlans( - optimized.getChild().stream().map(this::optimize).collect(Collectors.toList())); + optimized.getChild().stream().map(this::optimize).collect( + Collectors.toList())); return internalOptimize(optimized); } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java index 5201c83c25..8150de824d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java @@ -14,12 +14,15 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.storage.read.TableScanBuilder; -/** A {@link LogicalPlanOptimizer} rule that pushes down page size to table scan builder. */ +/** + * A {@link LogicalPlanOptimizer} rule that pushes down page size + * to table scan builder. 
+ */ public class PushDownPageSize implements Rule { @Override public Pattern pattern() { return Pattern.typeOf(LogicalPaginate.class) - .matching(lp -> findTableScanBuilder(lp).isPresent()); + .matching(lp -> findTableScanBuilder(lp).isPresent()); } @Override @@ -41,7 +44,7 @@ private Optional findTableScanBuilder(LogicalPaginate logicalP if (children.stream().anyMatch(TableScanBuilder.class::isInstance)) { if (children.size() > 1) { throw new UnsupportedOperationException( - "Unsupported plan: relation operator cannot have siblings"); + "Unsupported plan: relation operator cannot have siblings"); } return Optional.of((TableScanBuilder) children.get(0)); } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java index b06ca3e968..123754d3d0 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.optimizer; import com.facebook.presto.matching.Captures; @@ -11,17 +12,17 @@ /** * Optimization Rule. - * * @param LogicalPlan. */ public interface Rule { - /** Get the {@link Pattern}. */ + /** + * Get the {@link Pattern}. + */ Pattern pattern(); /** * Apply the Rule to the LogicalPlan. - * * @param plan LogicalPlan which match the Pattern. * @param captures A list of LogicalPlan which are captured by the Pattern. * @return the transfromed LogicalPlan. diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java index ee4e9a20cc..8f5ac86580 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.optimizer.pattern; import com.facebook.presto.matching.Capture; @@ -24,89 +25,108 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.storage.read.TableScanBuilder; -/** Pattern helper class. */ +/** + * Pattern helper class. + */ @UtilityClass public class Patterns { - /** Logical filter with a given pattern on inner field. */ + /** + * Logical filter with a given pattern on inner field. + */ public static Pattern filter(Pattern pattern) { return Pattern.typeOf(LogicalFilter.class).with(source(pattern)); } - /** Logical aggregate operator with a given pattern on inner field. */ + /** + * Logical aggregate operator with a given pattern on inner field. + */ public static Pattern aggregate(Pattern pattern) { return Pattern.typeOf(LogicalAggregation.class).with(source(pattern)); } - /** Logical sort operator with a given pattern on inner field. */ + /** + * Logical sort operator with a given pattern on inner field. + */ public static Pattern sort(Pattern pattern) { return Pattern.typeOf(LogicalSort.class).with(source(pattern)); } - /** Logical limit operator with a given pattern on inner field. */ + /** + * Logical limit operator with a given pattern on inner field. + */ public static Pattern limit(Pattern pattern) { return Pattern.typeOf(LogicalLimit.class).with(source(pattern)); } - /** Logical highlight operator with a given pattern on inner field. */ + /** + * Logical highlight operator with a given pattern on inner field. 
+ */ public static Pattern highlight(Pattern pattern) { return Pattern.typeOf(LogicalHighlight.class).with(source(pattern)); } - /** Logical nested operator with a given pattern on inner field. */ + /** + * Logical nested operator with a given pattern on inner field. + */ public static Pattern nested(Pattern pattern) { return Pattern.typeOf(LogicalNested.class).with(source(pattern)); } - /** Logical project operator with a given pattern on inner field. */ + /** + * Logical project operator with a given pattern on inner field. + */ public static Pattern project(Pattern pattern) { return Pattern.typeOf(LogicalProject.class).with(source(pattern)); } - /** Pattern for {@link TableScanBuilder} and capture it meanwhile. */ + /** + * Pattern for {@link TableScanBuilder} and capture it meanwhile. + */ public static Pattern scanBuilder() { return Pattern.typeOf(TableScanBuilder.class).capturedAs(Capture.newCapture()); } - /** LogicalPlan source {@link Property}. */ + /** + * LogicalPlan source {@link Property}. + */ public static Property source() { - return Property.optionalProperty( - "source", - plan -> - plan.getChild().size() == 1 ? Optional.of(plan.getChild().get(0)) : Optional.empty()); + return Property.optionalProperty("source", plan -> plan.getChild().size() == 1 + ? Optional.of(plan.getChild().get(0)) + : Optional.empty()); } - /** Source (children field) with a given pattern. */ + /** + * Source (children field) with a given pattern. + */ @SuppressWarnings("unchecked") - public static PropertyPattern source(Pattern pattern) { - Property property = - Property.optionalProperty( - "source", - plan -> - plan.getChild().size() == 1 - ? Optional.of((T) plan.getChild().get(0)) - : Optional.empty()); + public static + PropertyPattern source(Pattern pattern) { + Property property = Property.optionalProperty("source", + plan -> plan.getChild().size() == 1 + ? Optional.of((T) plan.getChild().get(0)) + : Optional.empty()); return property.matching(pattern); } - /** Logical relation with table field. */ + /** + * Logical relation with table field. + */ public static Property table() { - return Property.optionalProperty( - "table", - plan -> - plan instanceof LogicalRelation - ? Optional.of(((LogicalRelation) plan).getTable()) - : Optional.empty()); + return Property.optionalProperty("table", + plan -> plan instanceof LogicalRelation + ? Optional.of(((LogicalRelation) plan).getTable()) + : Optional.empty()); } - /** Logical write with table field. */ + /** + * Logical write with table field. + */ public static Property writeTable() { - return Property.optionalProperty( - "table", - plan -> - plan instanceof LogicalWrite - ? Optional.of(((LogicalWrite) plan).getTable()) - : Optional.empty()); + return Property.optionalProperty("table", + plan -> plan instanceof LogicalWrite + ? 
Optional.of(((LogicalWrite) plan).getTable()) + : Optional.empty()); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java index 6270eee131..57763728d5 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.optimizer.rule; import static com.facebook.presto.matching.Pattern.typeOf; @@ -18,7 +19,9 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.optimizer.Rule; -/** Merge Filter --> Filter to the single Filter condition. */ +/** + * Merge Filter --> Filter to the single Filter condition. + */ public class MergeFilterAndFilter implements Rule { private final Capture capture; @@ -27,18 +30,22 @@ public class MergeFilterAndFilter implements Rule { @Getter private final Pattern pattern; - /** Constructor of MergeFilterAndFilter. */ + /** + * Constructor of MergeFilterAndFilter. + */ public MergeFilterAndFilter() { this.capture = Capture.newCapture(); - this.pattern = - typeOf(LogicalFilter.class) - .with(source().matching(typeOf(LogicalFilter.class).capturedAs(capture))); + this.pattern = typeOf(LogicalFilter.class) + .with(source().matching(typeOf(LogicalFilter.class).capturedAs(capture))); } @Override - public LogicalPlan apply(LogicalFilter filter, Captures captures) { + public LogicalPlan apply(LogicalFilter filter, + Captures captures) { LogicalFilter childFilter = captures.get(capture); return new LogicalFilter( - childFilter.getChild().get(0), DSL.and(filter.getCondition(), childFilter.getCondition())); + childFilter.getChild().get(0), + DSL.and(filter.getCondition(), childFilter.getCondition()) + ); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java index 045ab473f1..e3347b402b 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.optimizer.rule; import static com.facebook.presto.matching.Pattern.typeOf; @@ -18,7 +19,10 @@ import org.opensearch.sql.planner.logical.LogicalSort; import org.opensearch.sql.planner.optimizer.Rule; -/** Push Filter under Sort. Filter - Sort - Child --> Sort - Filter - Child */ +/** + * Push Filter under Sort. + * Filter - Sort - Child --> Sort - Filter - Child + */ public class PushFilterUnderSort implements Rule { private final Capture capture; @@ -27,17 +31,22 @@ public class PushFilterUnderSort implements Rule { @Getter private final Pattern pattern; - /** Constructor of PushFilterUnderSort. */ + /** + * Constructor of PushFilterUnderSort. 
+ */ public PushFilterUnderSort() { this.capture = Capture.newCapture(); - this.pattern = - typeOf(LogicalFilter.class) - .with(source().matching(typeOf(LogicalSort.class).capturedAs(capture))); + this.pattern = typeOf(LogicalFilter.class) + .with(source().matching(typeOf(LogicalSort.class).capturedAs(capture))); } @Override - public LogicalPlan apply(LogicalFilter filter, Captures captures) { + public LogicalPlan apply(LogicalFilter filter, + Captures captures) { LogicalSort sort = captures.get(capture); - return new LogicalSort(filter.replaceChildPlans(sort.getChild()), sort.getSortList()); + return new LogicalSort( + filter.replaceChildPlans(sort.getChild()), + sort.getSortList() + ); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java index 6ed8e1faeb..dbe61ca8c3 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java @@ -19,9 +19,9 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * Rule that replace logical relation operator to {@link TableScanBuilder} for later push down - * optimization. All push down optimization rules that depends on table scan builder needs to run - * after this. + * Rule that replace logical relation operator to {@link TableScanBuilder} for later + * push down optimization. All push down optimization rules that depends on table scan + * builder needs to run after this. */ public class CreateTableScanBuilder implements Rule { @@ -33,10 +33,13 @@ public class CreateTableScanBuilder implements Rule { @Getter private final Pattern pattern; - /** Construct create table scan builder rule. */ + /** + * Construct create table scan builder rule. + */ public CreateTableScanBuilder() { this.capture = Capture.newCapture(); - this.pattern = Pattern.typeOf(LogicalRelation.class).with(table().capturedAs(capture)); + this.pattern = Pattern.typeOf(LogicalRelation.class) + .with(table().capturedAs(capture)); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java index b83155d90f..de2b47d403 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java @@ -26,10 +26,10 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * Rule template for all table scan push down rules. Because all push down optimization rules have - * similar workflow in common, such as a pattern that match an operator on top of table scan - * builder, and action that eliminates the original operator if pushed down, this class helps remove - * redundant code and improve readability. + * Rule template for all table scan push down rules. Because all push down optimization rules + * have similar workflow in common, such as a pattern that match an operator on top of table scan + * builder, and action that eliminates the original operator if pushed down, this class helps + * remove redundant code and improve readability. * * @param logical plan node type */ @@ -37,32 +37,48 @@ public class TableScanPushDown implements Rule { /** Push down optimize rule for filtering condition. 
*/ public static final Rule PUSH_DOWN_FILTER = - match(filter(scanBuilder())) - .apply((filter, scanBuilder) -> scanBuilder.pushDownFilter(filter)); + match( + filter( + scanBuilder())) + .apply((filter, scanBuilder) -> scanBuilder.pushDownFilter(filter)); /** Push down optimize rule for aggregate operator. */ public static final Rule PUSH_DOWN_AGGREGATION = - match(aggregate(scanBuilder())) - .apply((agg, scanBuilder) -> scanBuilder.pushDownAggregation(agg)); + match( + aggregate( + scanBuilder())) + .apply((agg, scanBuilder) -> scanBuilder.pushDownAggregation(agg)); /** Push down optimize rule for sort operator. */ public static final Rule PUSH_DOWN_SORT = - match(sort(scanBuilder())).apply((sort, scanBuilder) -> scanBuilder.pushDownSort(sort)); + match( + sort( + scanBuilder())) + .apply((sort, scanBuilder) -> scanBuilder.pushDownSort(sort)); /** Push down optimize rule for limit operator. */ public static final Rule PUSH_DOWN_LIMIT = - match(limit(scanBuilder())).apply((limit, scanBuilder) -> scanBuilder.pushDownLimit(limit)); + match( + limit( + scanBuilder())) + .apply((limit, scanBuilder) -> scanBuilder.pushDownLimit(limit)); public static final Rule PUSH_DOWN_PROJECT = - match(project(scanBuilder())) - .apply((project, scanBuilder) -> scanBuilder.pushDownProject(project)); + match( + project( + scanBuilder())) + .apply((project, scanBuilder) -> scanBuilder.pushDownProject(project)); public static final Rule PUSH_DOWN_HIGHLIGHT = - match(highlight(scanBuilder())) + match( + highlight( + scanBuilder())) .apply((highlight, scanBuilder) -> scanBuilder.pushDownHighlight(highlight)); public static final Rule PUSH_DOWN_NESTED = - match(nested(scanBuilder())) + match( + nested( + scanBuilder())) .apply((nested, scanBuilder) -> scanBuilder.pushDownNested(nested)); /** Pattern that matches a plan node. */ @@ -74,9 +90,10 @@ public class TableScanPushDown implements Rule { /** Push down function applied to the plan node and captured table scan builder. */ private final BiFunction pushDownFunction; + @SuppressWarnings("unchecked") - private TableScanPushDown( - WithPattern pattern, BiFunction pushDownFunction) { + private TableScanPushDown(WithPattern pattern, + BiFunction pushDownFunction) { this.pattern = pattern; this.capture = ((CapturePattern) pattern.getPattern()).capture(); this.pushDownFunction = pushDownFunction; @@ -96,18 +113,22 @@ public LogicalPlan apply(T plan, Captures captures) { return plan; } - /** Custom builder class other than generated by Lombok to provide more readable code. */ + /** + * Custom builder class other than generated by Lombok to provide more readable code. 
+ */ static class TableScanPushDownBuilder { private WithPattern pattern; - public static TableScanPushDownBuilder match(Pattern pattern) { + public static + TableScanPushDownBuilder match(Pattern pattern) { TableScanPushDownBuilder builder = new TableScanPushDownBuilder<>(); builder.pattern = (WithPattern) pattern; return builder; } - public TableScanPushDown apply(BiFunction pushDownFunction) { + public TableScanPushDown apply( + BiFunction pushDownFunction) { return new TableScanPushDown<>(pattern, pushDownFunction); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java index 0a4045d404..4fbf676862 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java @@ -32,10 +32,13 @@ public class CreateTableWriteBuilder implements Rule { @Getter private final Pattern pattern; - /** Construct create table write builder rule. */ + /** + * Construct create table write builder rule. + */ public CreateTableWriteBuilder() { this.capture = Capture.newCapture(); - this.pattern = Pattern.typeOf(LogicalWrite.class).with(writeTable().capturedAs(capture)); + this.pattern = Pattern.typeOf(LogicalWrite.class) + .with(writeTable().capturedAs(capture)); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java index cc1c047c31..1d9523464b 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import java.util.Collections; @@ -16,6 +17,7 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.aggregation.Aggregator; import org.opensearch.sql.expression.aggregation.NamedAggregator; +import org.opensearch.sql.expression.span.SpanExpression; import org.opensearch.sql.planner.physical.collector.Collector; import org.opensearch.sql.storage.bindingtuple.BindingTuple; @@ -26,26 +28,30 @@ @EqualsAndHashCode(callSuper = false) @ToString public class AggregationOperator extends PhysicalPlan { - @Getter private final PhysicalPlan input; - @Getter private final List aggregatorList; - @Getter private final List groupByExprList; - - /** {@link BindingTuple} Collector. */ - @EqualsAndHashCode.Exclude private final Collector collector; + @Getter + private final PhysicalPlan input; + @Getter + private final List aggregatorList; + @Getter + private final List groupByExprList; - @EqualsAndHashCode.Exclude private Iterator iterator; + /** + * {@link BindingTuple} Collector. + */ + @EqualsAndHashCode.Exclude + private final Collector collector; + @EqualsAndHashCode.Exclude + private Iterator iterator; /** * AggregationOperator Constructor. 
* - * @param input Input {@link PhysicalPlan} - * @param aggregatorList List of {@link Aggregator} + * @param input Input {@link PhysicalPlan} + * @param aggregatorList List of {@link Aggregator} * @param groupByExprList List of group by {@link Expression} */ - public AggregationOperator( - PhysicalPlan input, - List aggregatorList, - List groupByExprList) { + public AggregationOperator(PhysicalPlan input, List aggregatorList, + List groupByExprList) { this.input = input; this.aggregatorList = aggregatorList; this.groupByExprList = groupByExprList; @@ -62,6 +68,7 @@ public List getChild() { return Collections.singletonList(input); } + @Override public boolean hasNext() { return iterator.hasNext(); diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java index 688ffa0d8d..7921d0dd50 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java @@ -11,9 +11,9 @@ import org.opensearch.sql.executor.ExecutionEngine; /** - * A plan node which blocks issuing a request in {@link #open} and getting results in {@link - * #hasNext}, but doesn't block releasing resources in {@link #close}. Designed to be on top of the - * deserialized tree. + * A plan node which blocks issuing a request in {@link #open} and + * getting results in {@link #hasNext}, but doesn't block releasing resources in {@link #close}. + * Designed to be on top of the deserialized tree. */ @RequiredArgsConstructor public class CursorCloseOperator extends PhysicalPlan { @@ -41,7 +41,9 @@ public List getChild() { return List.of(input); } - /** Provides an empty schema, because this plan node is always located on the top of the tree. */ + /** + * Provides an empty schema, because this plan node is always located on the top of the tree. 
+ */ @Override public ExecutionEngine.Schema schema() { return new ExecutionEngine.Schema(List.of()); diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java index 7faec2154b..452fbd9707 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -27,14 +28,21 @@ @Getter @EqualsAndHashCode(callSuper = false) public class DedupeOperator extends PhysicalPlan { - @Getter private final PhysicalPlan input; - @Getter private final List dedupeList; - @Getter private final Integer allowedDuplication; - @Getter private final Boolean keepEmpty; - @Getter private final Boolean consecutive; - - @EqualsAndHashCode.Exclude private final Deduper> deduper; - @EqualsAndHashCode.Exclude private ExprValue next; + @Getter + private final PhysicalPlan input; + @Getter + private final List dedupeList; + @Getter + private final Integer allowedDuplication; + @Getter + private final Boolean keepEmpty; + @Getter + private final Boolean consecutive; + + @EqualsAndHashCode.Exclude + private final Deduper> deduper; + @EqualsAndHashCode.Exclude + private ExprValue next; private static final Integer ALL_ONE_DUPLICATION = 1; private static final Boolean IGNORE_EMPTY = false; @@ -49,7 +57,6 @@ public DedupeOperator(PhysicalPlan input, List dedupeList) { /** * Dedup Constructor. - * * @param input input {@link PhysicalPlan} * @param dedupeList list of dedupe {@link Expression} * @param allowedDuplication max allowed duplication @@ -133,7 +140,9 @@ static class Deduper { private final BiFunction, K, Integer> seenFirstTime; private final Map seenMap = new ConcurrentHashMap<>(); - /** The Historical Deduper monitor the duplicated element with all the seen value. */ + /** + * The Historical Deduper monitor the duplicated element with all the seen value. + */ public static Deduper historicalDeduper() { return new Deduper<>( (map, key) -> { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java index ac62fe1b86..3b9e1a8214 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; @@ -33,15 +34,17 @@ * If the field name exist in the input, a new value will be put into to output. * *
<p>
The {@link EvalOperator#expressionList} are evaluated from left to right. It means you can - * reference previous evaluated field. e.g. fields velocity = distance/time, doubleVelocity = 2 * - * velocity + * reference previous evaluated field. + * e.g. fields velocity = distance/time, doubleVelocity = 2 * velocity */ @ToString @EqualsAndHashCode(callSuper = false) @RequiredArgsConstructor public class EvalOperator extends PhysicalPlan { - @Getter private final PhysicalPlan input; - @Getter private final List> expressionList; + @Getter + private final PhysicalPlan input; + @Getter + private final List> expressionList; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { @@ -83,7 +86,6 @@ public ExprValue next() { /** * Evaluate the expression in the {@link EvalOperator#expressionList} with {@link Environment}. - * * @param env {@link Environment} * @return The mapping of reference and {@link ExprValue} for each expression. */ diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java index ec61d53163..4b5045d24e 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java @@ -17,17 +17,21 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; /** - * The Filter operator represents WHERE clause and uses the conditions to evaluate the input {@link - * BindingTuple}. The Filter operator only returns the results that evaluated to true. The NULL and - * MISSING are handled by the logic defined in {@link BinaryPredicateOperator}. + * The Filter operator represents WHERE clause and + * uses the conditions to evaluate the input {@link BindingTuple}. + * The Filter operator only returns the results that evaluated to true. + * The NULL and MISSING are handled by the logic defined in {@link BinaryPredicateOperator}. */ @EqualsAndHashCode(callSuper = false) @ToString @RequiredArgsConstructor public class FilterOperator extends PhysicalPlan { - @Getter private final PhysicalPlan input; - @Getter private final Expression conditions; - @ToString.Exclude private ExprValue next = null; + @Getter + private final PhysicalPlan input; + @Getter + private final Expression conditions; + @ToString.Exclude + private ExprValue next = null; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java index dc9038f2a3..cd84234c4b 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -14,15 +15,15 @@ import org.opensearch.sql.data.model.ExprValue; /** - * The limit operator sets a window, to and block the rows out of the window and allow only the - * result subset within this window to the output. + * The limit operator sets a window, to and block the rows out of the window + * and allow only the result subset within this window to the output. * - *
<p>
The result subset is enframed from original result with {@link LimitOperator#offset} as the - * offset and {@link LimitOperator#limit} as the size, thus the output is the subset of the original - * result set that has indices from {index + 1} to {index + limit}. Special cases might occur where - * the result subset has a size smaller than expected {limit}, it occurs when the original result - * set has a size smaller than {index + limit}, or even not greater than the offset. The latter - * results in an empty output. + *
<p>
The result subset is enframed from original result with {@link LimitOperator#offset} + * as the offset and {@link LimitOperator#limit} as the size, thus the output + * is the subset of the original result set that has indices from {index + 1} to {index + limit}. + * Special cases might occur where the result subset has a size smaller than expected {limit}, + * it occurs when the original result set has a size smaller than {index + limit}, + * or even not greater than the offset. The latter results in an empty output.
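The offset/limit window described in this Javadoc can be mirrored with plain java.util.stream calls; a self-contained illustration of the semantics (not the operator's implementation), using offset = 2 and limit = 3:

import java.util.List;
import java.util.stream.Collectors;

class LimitSemanticsSketch {
  public static void main(String[] args) {
    List<Integer> rows = List.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
    int offset = 2;
    int limit = 3;
    // Rows with indices offset + 1 .. offset + limit survive; an input with no
    // more than `offset` rows would produce an empty result.
    List<Integer> window = rows.stream().skip(offset).limit(limit).collect(Collectors.toList());
    System.out.println(window); // [3, 4, 5]
  }
}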
*/ @RequiredArgsConstructor @Getter @@ -65,4 +66,5 @@ public R accept(PhysicalPlanNodeVisitor visitor, C context) { public List getChild() { return ImmutableList.of(input); } + } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java index 348d085313..54cd541519 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java @@ -27,47 +27,58 @@ import org.opensearch.sql.expression.ReferenceExpression; /** - * The NestedOperator evaluates the {@link NestedOperator#fields} and generates {@link - * NestedOperator#nonNestedFields} to form the {@link NestedOperator#result} output. Resolve two - * nested fields with differing paths will result in a cartesian product(inner join). + * The NestedOperator evaluates the {@link NestedOperator#fields} and + * generates {@link NestedOperator#nonNestedFields} to form the + * {@link NestedOperator#result} output. Resolve two nested fields + * with differing paths will result in a cartesian product(inner join). */ @EqualsAndHashCode(callSuper = false) public class NestedOperator extends PhysicalPlan { - @Getter private final PhysicalPlan input; - @Getter private final Set fields; // Needs to be a Set to match legacy implementation - @Getter private final Map> groupedPathsAndFields; - @EqualsAndHashCode.Exclude private List> result = new ArrayList<>(); - @EqualsAndHashCode.Exclude private List nonNestedFields = new ArrayList<>(); - + @Getter + private final PhysicalPlan input; + @Getter + private final Set fields; // Needs to be a Set to match legacy implementation + @Getter + private final Map> groupedPathsAndFields; + @EqualsAndHashCode.Exclude + private List> result = new ArrayList<>(); + @EqualsAndHashCode.Exclude + private List nonNestedFields = new ArrayList<>(); @EqualsAndHashCode.Exclude private ListIterator> flattenedResult = result.listIterator(); /** * Constructor for NestedOperator with list of map as arg. - * * @param input : PhysicalPlan input. * @param fields : List of all fields and paths for nested fields. */ public NestedOperator(PhysicalPlan input, List> fields) { this.input = input; - this.fields = fields.stream().map(m -> m.get("field").toString()).collect(Collectors.toSet()); - this.groupedPathsAndFields = - fields.stream() - .collect( - Collectors.groupingBy( - m -> m.get("path").toString(), - mapping(m -> m.get("field").toString(), toList()))); + this.fields = fields.stream() + .map(m -> m.get("field").toString()) + .collect(Collectors.toSet()); + this.groupedPathsAndFields = fields.stream().collect( + Collectors.groupingBy( + m -> m.get("path").toString(), + mapping( + m -> m.get("field").toString(), + toList() + ) + ) + ); } /** * Constructor for NestedOperator with Set of fields. - * * @param input : PhysicalPlan input. * @param fields : List of all fields for nested fields. * @param groupedPathsAndFields : Map of fields grouped by their path. */ public NestedOperator( - PhysicalPlan input, Set fields, Map> groupedPathsAndFields) { + PhysicalPlan input, + Set fields, + Map> groupedPathsAndFields + ) { this.input = input; this.fields = fields; this.groupedPathsAndFields = groupedPathsAndFields; @@ -117,16 +128,16 @@ public ExprValue next() { } /** - * Generate list of non-nested fields that are in inputMap, but not in the member variable fields - * list. 
- * + * Generate list of non-nested fields that are in inputMap, but not in the member variable + * fields list. * @param inputMap : Row to parse non-nested fields. */ public void generateNonNestedFieldsMap(ExprValue inputMap) { for (Map.Entry inputField : inputMap.tupleValue().entrySet()) { boolean foundNestedField = - this.fields.stream() - .anyMatch(field -> field.split("\\.")[0].equalsIgnoreCase(inputField.getKey())); + this.fields.stream().anyMatch( + field -> field.split("\\.")[0].equalsIgnoreCase(inputField.getKey()) + ); if (!foundNestedField) { this.nonNestedFields.add(inputField.getKey()); @@ -134,13 +145,20 @@ public void generateNonNestedFieldsMap(ExprValue inputMap) { } } + /** - * Simplifies the structure of row's source Map by flattening it, making the full path of an - * object the key and the Object it refers to the value. + * Simplifies the structure of row's source Map by flattening it, + * making the full path of an object the key + * and the Object it refers to the value. * - *
<p>
Sample input: keys = ['comments.likes'] row = comments: { likes: 2 } + *
<p>
Sample input: + * keys = ['comments.likes'] + * row = comments: { + * likes: 2 + * } * - *
<p>
Return: flattenedRow = {comment.likes: 2} + *
<p>
Return: + * flattenedRow = {comment.likes: 2} * * @param nestedField : Field to query in row. * @param row : Row returned from OS. @@ -148,7 +166,11 @@ public void generateNonNestedFieldsMap(ExprValue inputMap) { * @return : List of nested select items or cartesian product of nested calls. */ private List> flatten( - String nestedField, ExprValue row, List> prevList) { + String nestedField, + ExprValue row, + List> prevList + ) { List> copy = new ArrayList<>(); List> newList = new ArrayList<>(); @@ -179,10 +201,11 @@ private List> flatten( // Generate cartesian product for (Map prevMap : prevList) { for (Map newMap : copy) { - newList.add( - Stream.of(newMap, prevMap) - .flatMap(map -> map.entrySet().stream()) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + newList.add(Stream.of(newMap, prevMap) + .flatMap(map -> map.entrySet().stream()) + .collect(Collectors.toMap( + Map.Entry::getKey, + Map.Entry::getValue))); } } return newList; @@ -191,7 +214,6 @@ private List> flatten( /** * Check if newMap field has any sharing paths in prevMap. - * * @param newMap : New map to add to result set. * @return : true if there is already a field added to result set with same path. */ @@ -221,11 +243,9 @@ boolean containSamePath(Map newMap) { * @return : Object at current nested level. */ private void getNested( - String field, - String nestedField, - ExprValue row, - List> ret, - ExprValue nestedObj) { + String field, String nestedField, ExprValue row, + List> ret, ExprValue nestedObj + ) { ExprValue currentObj = (nestedObj == null) ? row : nestedObj; String[] splitKeys = nestedField.split("\\."); @@ -251,10 +271,12 @@ private void getNested( // Return final nested result if (currentObj != null && (StringUtils.substringAfterLast(field, ".").equals(nestedField) - || !field.contains("."))) { + || !field.contains(".")) + ) { ret.add(new LinkedHashMap<>(Map.of(field, currentObj))); } else if (currentObj != null) { - getNested(field, nestedField.substring(nestedField.indexOf(".") + 1), row, ret, currentObj); + getNested(field, nestedField.substring(nestedField.indexOf(".") + 1), + row, ret, currentObj); } } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java index 0ae795aa31..247b347940 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import java.util.Iterator; @@ -11,7 +12,9 @@ import org.opensearch.sql.planner.PlanNode; import org.opensearch.sql.storage.split.Split; -/** Physical plan. */ +/** + * Physical plan. + */ public abstract class PhysicalPlan implements PlanNode, Iterator, AutoCloseable { /** @@ -19,8 +22,8 @@ public abstract class PhysicalPlan * * @param visitor visitor. * @param context visitor context. - * @param returned object type. - * @param context type. + * @param returned object type. + * @param context type. * @return returned object. 
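Going back to the NestedOperator Javadoc above (the Sample input / Return pair), the dotted-path flattening it describes can be sketched independently of the operator; names and types here are illustrative only:

import java.util.LinkedHashMap;
import java.util.Map;

class FlattenSketch {
  // Turns {comments={likes=2}} into {comments.likes=2} by recursing on map values.
  @SuppressWarnings("unchecked")
  static Map<String, Object> flatten(String prefix, Map<String, Object> source) {
    Map<String, Object> out = new LinkedHashMap<>();
    for (Map.Entry<String, Object> entry : source.entrySet()) {
      String path = prefix.isEmpty() ? entry.getKey() : prefix + "." + entry.getKey();
      if (entry.getValue() instanceof Map) {
        out.putAll(flatten(path, (Map<String, Object>) entry.getValue()));
      } else {
        out.put(path, entry.getValue());
      }
    }
    return out;
  }

  public static void main(String[] args) {
    Map<String, Object> row = Map.of("comments", Map.of("likes", 2));
    System.out.println(flatten("", row)); // {comments.likes=2}
  }
}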
*/ public abstract R accept(PhysicalPlanNodeVisitor visitor, C context); @@ -38,9 +41,7 @@ public void add(Split split) { } public ExecutionEngine.Schema schema() { - throw new IllegalStateException( - String.format( - "[BUG] schema can been only applied to " + "ProjectOperator, instead of %s", - this.getClass().getSimpleName())); + throw new IllegalStateException(String.format("[BUG] schema can been only applied to " + + "ProjectOperator, instead of %s", this.getClass().getSimpleName())); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java index 147f0e08dc..8c10c91fb6 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -22,7 +23,9 @@ import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.expression.window.WindowDefinition; -/** Physical Plan DSL. */ +/** + * Physical Plan DSL. + */ @UtilityClass public class PhysicalPlanDSL { @@ -44,10 +47,8 @@ public static ProjectOperator project(PhysicalPlan input, NamedExpression... fie return new ProjectOperator(input, Arrays.asList(fields), ImmutableList.of()); } - public static ProjectOperator project( - PhysicalPlan input, - List fields, - List namedParseExpressions) { + public static ProjectOperator project(PhysicalPlan input, List fields, + List namedParseExpressions) { return new ProjectOperator(input, fields, namedParseExpressions); } @@ -60,7 +61,8 @@ public static EvalOperator eval( return new EvalOperator(input, Arrays.asList(expressions)); } - public static SortOperator sort(PhysicalPlan input, Pair... sorts) { + public static SortOperator sort(PhysicalPlan input, Pair... sorts) { return new SortOperator(input, Arrays.asList(sorts)); } @@ -78,27 +80,22 @@ public static DedupeOperator dedupe( input, Arrays.asList(expressions), allowedDuplication, keepEmpty, consecutive); } - public WindowOperator window( - PhysicalPlan input, NamedExpression windowFunction, WindowDefinition windowDefinition) { + public WindowOperator window(PhysicalPlan input, + NamedExpression windowFunction, + WindowDefinition windowDefinition) { return new WindowOperator(input, windowFunction, windowDefinition); } - public static RareTopNOperator rareTopN( - PhysicalPlan input, - CommandType commandType, - List groups, - Expression... expressions) { + public static RareTopNOperator rareTopN(PhysicalPlan input, CommandType commandType, + List groups, Expression... expressions) { return new RareTopNOperator(input, commandType, Arrays.asList(expressions), groups); } - public static RareTopNOperator rareTopN( - PhysicalPlan input, - CommandType commandType, - int noOfResults, - List groups, - Expression... expressions) { - return new RareTopNOperator( - input, commandType, noOfResults, Arrays.asList(expressions), groups); + public static RareTopNOperator rareTopN(PhysicalPlan input, CommandType commandType, + int noOfResults, + List groups, Expression... 
expressions) { + return new RareTopNOperator(input, commandType, noOfResults, Arrays.asList(expressions), + groups); } @SafeVarargs @@ -111,7 +108,9 @@ public static LimitOperator limit(PhysicalPlan input, Integer limit, Integer off } public static NestedOperator nested( - PhysicalPlan input, Set args, Map> groupedFieldsByPath) { + PhysicalPlan input, + Set args, + Map> groupedFieldsByPath) { return new NestedOperator(input, args, groupedFieldsByPath); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java index 99b5cc8020..1e8f08d39f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import org.opensearch.sql.storage.TableScanOperator; @@ -71,7 +72,7 @@ public R visitValues(ValuesOperator node, C context) { public R visitSort(SortOperator node, C context) { return visitNode(node, context); } - + public R visitRareTopN(RareTopNOperator node, C context) { return visitNode(node, context); } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java index 55422dacd3..1699c97c15 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableMap; @@ -26,14 +27,19 @@ import org.opensearch.sql.expression.parse.ParseExpression; import org.opensearch.sql.planner.SerializablePlan; -/** Project the fields specified in {@link ProjectOperator#projectList} from input. */ +/** + * Project the fields specified in {@link ProjectOperator#projectList} from input. 
+ */ @ToString @EqualsAndHashCode(callSuper = false) @AllArgsConstructor public class ProjectOperator extends PhysicalPlan implements SerializablePlan { - @Getter private PhysicalPlan input; - @Getter private List projectList; - @Getter private List namedParseExpressions; + @Getter + private PhysicalPlan input; + @Getter + private List projectList; + @Getter + private List namedParseExpressions; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { @@ -59,20 +65,17 @@ public ExprValue next() { // TODO needs a better implementation, see https://github.com/opensearch-project/sql/issues/458 for (NamedExpression expr : projectList) { ExprValue exprValue = expr.valueOf(inputValue.bindingTuples()); - Optional optionalParseExpression = - namedParseExpressions.stream() - .filter(parseExpr -> parseExpr.getNameOrAlias().equals(expr.getNameOrAlias())) - .findFirst(); + Optional optionalParseExpression = namedParseExpressions.stream() + .filter(parseExpr -> parseExpr.getNameOrAlias().equals(expr.getNameOrAlias())) + .findFirst(); if (optionalParseExpression.isEmpty()) { mapBuilder.put(expr.getNameOrAlias(), exprValue); continue; } NamedExpression parseExpression = optionalParseExpression.get(); - ExprValue sourceFieldValue = - inputValue - .bindingTuples() - .resolve(((ParseExpression) parseExpression.getDelegated()).getSourceField()); + ExprValue sourceFieldValue = inputValue.bindingTuples() + .resolve(((ParseExpression) parseExpression.getDelegated()).getSourceField()); if (sourceFieldValue.isMissing()) { // source field will be missing after stats command, read from inputValue if it exists // otherwise do nothing since it should not appear as a field @@ -91,17 +94,15 @@ public ExprValue next() { @Override public ExecutionEngine.Schema schema() { - return new ExecutionEngine.Schema( - getProjectList().stream() - .map( - expr -> - new ExecutionEngine.Schema.Column(expr.getName(), expr.getAlias(), expr.type())) - .collect(Collectors.toList())); + return new ExecutionEngine.Schema(getProjectList().stream() + .map(expr -> new ExecutionEngine.Schema.Column(expr.getName(), + expr.getAlias(), expr.type())).collect(Collectors.toList())); } /** Don't use, it is for deserialization needs only. 
*/ @Deprecated - public ProjectOperator() {} + public ProjectOperator() { + } @SuppressWarnings("unchecked") @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java index ecf997f7ae..fb3a91e2e6 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import com.google.common.annotations.VisibleForTesting; @@ -35,38 +36,40 @@ @EqualsAndHashCode(callSuper = false) public class RareTopNOperator extends PhysicalPlan { - @Getter private final PhysicalPlan input; - @Getter private final CommandType commandType; - @Getter private final Integer noOfResults; - @Getter private final List fieldExprList; - @Getter private final List groupByExprList; - - @EqualsAndHashCode.Exclude private final Group group; - @EqualsAndHashCode.Exclude private Iterator iterator; + @Getter + private final PhysicalPlan input; + @Getter + private final CommandType commandType; + @Getter + private final Integer noOfResults; + @Getter + private final List fieldExprList; + @Getter + private final List groupByExprList; + + @EqualsAndHashCode.Exclude + private final Group group; + @EqualsAndHashCode.Exclude + private Iterator iterator; private static final Integer DEFAULT_NO_OF_RESULTS = 10; - public RareTopNOperator( - PhysicalPlan input, - CommandType commandType, - List fieldExprList, - List groupByExprList) { + + public RareTopNOperator(PhysicalPlan input, CommandType commandType, + List fieldExprList, List groupByExprList) { this(input, commandType, DEFAULT_NO_OF_RESULTS, fieldExprList, groupByExprList); } /** * RareTopNOperator Constructor. * - * @param input Input {@link PhysicalPlan} - * @param commandType Enum for Rare/TopN command. - * @param noOfResults Number of results - * @param fieldExprList List of {@link Expression} + * @param input Input {@link PhysicalPlan} + * @param commandType Enum for Rare/TopN command. + * @param noOfResults Number of results + * @param fieldExprList List of {@link Expression} * @param groupByExprList List of group by {@link Expression} */ - public RareTopNOperator( - PhysicalPlan input, - CommandType commandType, - int noOfResults, + public RareTopNOperator(PhysicalPlan input, CommandType commandType, int noOfResults, List fieldExprList, List groupByExprList) { this.input = input; @@ -112,50 +115,48 @@ public class Group { private final Map> groupListMap = new HashMap<>(); - /** Push the BindingTuple to Group. */ + /** + * Push the BindingTuple to Group. 
+ */ public void push(ExprValue inputValue) { Key groupKey = new Key(inputValue, groupByExprList); Key fieldKey = new Key(inputValue, fieldExprList); - groupListMap.computeIfAbsent( - groupKey, - k -> { - Map map = new HashMap<>(); - map.put(fieldKey, 1); - return map; - }); - groupListMap.computeIfPresent( - groupKey, - (key, map) -> { - map.computeIfAbsent(fieldKey, f -> 1); - map.computeIfPresent( - fieldKey, - (field, count) -> { - return count + 1; - }); - return map; - }); + groupListMap.computeIfAbsent(groupKey, k -> { + Map map = new HashMap<>(); + map.put(fieldKey, 1); + return map; + }); + groupListMap.computeIfPresent(groupKey, (key, map) -> { + map.computeIfAbsent(fieldKey, f -> 1); + map.computeIfPresent(fieldKey, (field, count) -> { + return count + 1; + }); + return map; + }); } - /** Get the list of {@link BindingTuple} for each group. */ + /** + * Get the list of {@link BindingTuple} for each group. + */ public List result() { ImmutableList.Builder resultBuilder = new ImmutableList.Builder<>(); - groupListMap.forEach( - (groups, fieldMap) -> { - Map map = new LinkedHashMap<>(); - List result = find(fieldMap); - result.forEach( - field -> { - map.putAll(groups.keyMap(groupByExprList)); - map.putAll(field.keyMap(fieldExprList)); - resultBuilder.add(ExprTupleValue.fromExprValueMap(map)); - }); - }); + groupListMap.forEach((groups, fieldMap) -> { + Map map = new LinkedHashMap<>(); + List result = find(fieldMap); + result.forEach(field -> { + map.putAll(groups.keyMap(groupByExprList)); + map.putAll(field.keyMap(fieldExprList)); + resultBuilder.add(ExprTupleValue.fromExprValueMap(map)); + }); + }); return resultBuilder.build(); } - /** Get a list of result. */ + /** + * Get a list of result. + */ public List find(Map map) { Comparator> valueComparator; if (CommandType.TOP.equals(commandType)) { @@ -164,37 +165,40 @@ public List find(Map map) { valueComparator = Map.Entry.comparingByValue(); } - return map.entrySet().stream() - .sorted(valueComparator) - .limit(noOfResults) - .map(Map.Entry::getKey) - .collect(Collectors.toList()); + return map.entrySet().stream().sorted(valueComparator).limit(noOfResults) + .map(Map.Entry::getKey).collect(Collectors.toList()); } } - /** Key. */ + /** + * Key. + */ @EqualsAndHashCode @VisibleForTesting public class Key { private final List valueList; - /** Key constructor. */ + /** + * Key constructor. + */ public Key(ExprValue value, List exprList) { - this.valueList = - exprList.stream() - .map(expr -> expr.valueOf(value.bindingTuples())) - .collect(Collectors.toList()); + this.valueList = exprList.stream() + .map(expr -> expr.valueOf(value.bindingTuples())).collect(Collectors.toList()); } - /** Return the Map of key and key value. */ + /** + * Return the Map of key and key value. 
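The find() logic above amounts to sorting a key-to-count map by value and keeping the first noOfResults entries, with the order reversed for the TOP command; a standalone equivalent with plain collections (illustrative, not the operator's code):

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

class TopNSketch {
  // Highest-count keys first, truncated to n entries; the RARE variant keeps
  // the ascending comparator shown in the diff instead.
  static List<String> topN(Map<String, Integer> counts, int n) {
    return counts.entrySet().stream()
        .sorted(Map.Entry.<String, Integer>comparingByValue().reversed())
        .limit(n)
        .map(Map.Entry::getKey)
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    System.out.println(topN(Map.of("a", 5, "b", 2, "c", 9), 2)); // [c, a]
  }
}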
+ */ public Map keyMap(List exprList) { return Streams.zip( - exprList.stream().map(expression -> expression.toString()), - valueList.stream(), - AbstractMap.SimpleEntry::new) - .collect(Collectors.toMap(key -> key.getKey(), key -> key.getValue())); + exprList.stream().map( + expression -> expression.toString()), + valueList.stream(), + AbstractMap.SimpleEntry::new + ).collect(Collectors.toMap(key -> key.getKey(), key -> key.getValue())); } } + } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java index b4a724aa7a..3fa3519d10 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; @@ -23,19 +24,26 @@ import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.expression.ReferenceExpression; -/** Remove the fields specified in {@link RemoveOperator#removeList} from input. */ +/** + * Remove the fields specified in {@link RemoveOperator#removeList} from input. + */ @ToString @EqualsAndHashCode(callSuper = false) public class RemoveOperator extends PhysicalPlan { - @Getter private final PhysicalPlan input; - @Getter private final Set removeList; - @ToString.Exclude @EqualsAndHashCode.Exclude private final Set nameRemoveList; + @Getter + private final PhysicalPlan input; + @Getter + private final Set removeList; + @ToString.Exclude + @EqualsAndHashCode.Exclude + private final Set nameRemoveList; /** * Todo. This is the temporary solution that add the mapping between string and ref. because when * rename the field from input, there we can only get the string field. */ - public RemoveOperator(PhysicalPlan input, Set removeList) { + public RemoveOperator(PhysicalPlan input, + Set removeList) { this.input = input; this.removeList = removeList; this.nameRemoveList = diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java index e6f97dab4a..f0b0d13c50 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; @@ -23,30 +24,35 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; /** - * Rename the binding name in {@link BindingTuple}. The mapping maintain the relation between source - * and target. it means BindingTuple.resolve(target) = BindingTuple.resolve(source). + * Rename the binding name in {@link BindingTuple}. + * The mapping maintain the relation between source and target. + * it means BindingTuple.resolve(target) = BindingTuple.resolve(source). */ @EqualsAndHashCode(callSuper = false) @ToString public class RenameOperator extends PhysicalPlan { - @Getter private final PhysicalPlan input; - @Getter private final Map mapping; - + @Getter + private final PhysicalPlan input; + @Getter + private final Map mapping; /** * Todo. This is the temporary solution that add the mapping between string and ref. because when * rename the field from input, there we can only get the string field. 
*/ - @ToString.Exclude @EqualsAndHashCode.Exclude + @ToString.Exclude + @EqualsAndHashCode.Exclude private final Map nameMapping; - /** Constructor of RenameOperator. */ - public RenameOperator(PhysicalPlan input, Map mapping) { + /** + * Constructor of RenameOperator. + */ + public RenameOperator(PhysicalPlan input, + Map mapping) { this.input = input; this.mapping = mapping; this.nameMapping = - mapping.entrySet().stream() - .collect( - Collectors.toMap(entry -> entry.getKey().getAttr(), entry -> entry.getValue())); + mapping.entrySet().stream().collect(Collectors.toMap(entry -> entry.getKey().getAttr(), + entry -> entry.getValue())); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java index e3116baedf..4463892ca5 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import static org.opensearch.sql.ast.tree.Sort.NullOrder.NULL_FIRST; @@ -27,26 +28,30 @@ /** * Sort Operator.The input data is sorted by the sort fields in the {@link SortOperator#sortList}. - * The sort field is specified by the {@link Expression} with {@link SortOption}. The count indicate - * how many sorted result should been return. + * The sort field is specified by the {@link Expression} with {@link SortOption}. + * The count indicate how many sorted result should been return. */ @ToString @EqualsAndHashCode(callSuper = false) public class SortOperator extends PhysicalPlan { - @Getter private final PhysicalPlan input; + @Getter + private final PhysicalPlan input; - @Getter private final List> sortList; - @EqualsAndHashCode.Exclude private final Sorter sorter; - @EqualsAndHashCode.Exclude private Iterator iterator; + @Getter + private final List> sortList; + @EqualsAndHashCode.Exclude + private final Sorter sorter; + @EqualsAndHashCode.Exclude + private Iterator iterator; /** * Sort Operator Constructor. - * * @param input input {@link PhysicalPlan} - * @param sortList list of sort sort field. The sort field is specified by the {@link Expression} - * with {@link SortOption} + * @param sortList list of sort sort field. 
+ * The sort field is specified by the {@link Expression} with {@link SortOption} */ - public SortOperator(PhysicalPlan input, List> sortList) { + public SortOperator( + PhysicalPlan input, List> sortList) { this.input = input; this.sortList = sortList; SorterBuilder sorterBuilder = Sorter.builder(); @@ -96,7 +101,8 @@ public ExprValue next() { @Builder public static class Sorter implements Comparator { - @Singular private final List>> comparators; + @Singular + private final List>> comparators; @Override public int compare(ExprValue o1, ExprValue o2) { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java index 4a4ce27da8..4ac9d6a30a 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -17,15 +18,22 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.LiteralExpression; -/** Physical operator for Values. */ +/** + * Physical operator for Values. + */ @ToString @EqualsAndHashCode(callSuper = false, of = "values") public class ValuesOperator extends PhysicalPlan { - /** Original values list for print and equality check. */ - @Getter private final List> values; + /** + * Original values list for print and equality check. + */ + @Getter + private final List> values; - /** Values iterator. */ + /** + * Values iterator. + */ private final Iterator> valuesIterator; public ValuesOperator(List> values) { @@ -50,8 +58,10 @@ public boolean hasNext() { @Override public ExprValue next() { - List values = - valuesIterator.next().stream().map(Expression::valueOf).collect(Collectors.toList()); + List values = valuesIterator.next().stream() + .map(Expression::valueOf) + .collect(Collectors.toList()); return new ExprCollectionValue(values); } + } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java index 10377ce47a..8ecdcfbf49 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableMap; @@ -20,34 +21,43 @@ import org.opensearch.sql.expression.window.WindowFunctionExpression; import org.opensearch.sql.expression.window.frame.WindowFrame; -/** Physical operator for window function computation. */ +/** + * Physical operator for window function computation. 
+ */ @EqualsAndHashCode(callSuper = false) @ToString public class WindowOperator extends PhysicalPlan { - @Getter private final PhysicalPlan input; + @Getter + private final PhysicalPlan input; - @Getter private final NamedExpression windowFunction; + @Getter + private final NamedExpression windowFunction; - @Getter private final WindowDefinition windowDefinition; + @Getter + private final WindowDefinition windowDefinition; - @EqualsAndHashCode.Exclude @ToString.Exclude private final WindowFrame windowFrame; + @EqualsAndHashCode.Exclude + @ToString.Exclude + private final WindowFrame windowFrame; /** - * Peeking iterator that can peek next element which is required by window frame such as peer - * frame to prefetch all rows related to same peer (of same sorting key). + * Peeking iterator that can peek next element which is required + * by window frame such as peer frame to prefetch all rows related + * to same peer (of same sorting key). */ - @EqualsAndHashCode.Exclude @ToString.Exclude + @EqualsAndHashCode.Exclude + @ToString.Exclude private final PeekingIterator peekingIterator; /** * Initialize window operator. - * - * @param input child operator - * @param windowFunction window function - * @param windowDefinition window definition + * @param input child operator + * @param windowFunction window function + * @param windowDefinition window definition */ - public WindowOperator( - PhysicalPlan input, NamedExpression windowFunction, WindowDefinition windowDefinition) { + public WindowOperator(PhysicalPlan input, + NamedExpression windowFunction, + WindowDefinition windowDefinition) { this.input = input; this.windowFunction = windowFunction; this.windowDefinition = windowDefinition; @@ -97,4 +107,5 @@ private void addWindowFunctionResultColumn(ImmutableMap.Builder supplier; /** - * Map from bucketKey to nested collector sorted by key to make sure final result is in order - * after traversal. + * Map from bucketKey to nested collector sorted by key to make sure + * final result is in order after traversal. */ private final Map collectorMap = new TreeMap<>(); - /** Bucket Index. */ + /** + * Bucket Index. + */ private int bucketIndex = 0; /** - * Collect Bucket from {@link BindingTuple}. If bucket not exist, create new bucket and {@link - * Collector}. If bucket exist, let {@link Collector} in the bucket collect from {@link - * BindingTuple}. + * Collect Bucket from {@link BindingTuple}. + * If bucket not exist, create new bucket and {@link Collector}. + * If bucket exist, let {@link Collector} in the bucket collect from {@link BindingTuple}. * * @param input {@link BindingTuple}. */ @@ -56,7 +64,6 @@ public void collect(BindingTuple input) { /** * Bucket Key. - * * @param tuple {@link BindingTuple}. * @return Bucket Key. */ diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java index e696d5068f..a2b3a41a27 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java @@ -13,7 +13,9 @@ import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.storage.bindingtuple.BindingTuple; -/** Interface of {@link BindingTuple} Collector. */ +/** + * Interface of {@link BindingTuple} Collector. + */ public interface Collector { /** @@ -30,12 +32,16 @@ public interface Collector { */ List results(); - /** {@link Collector} tree builder. 
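The BucketCollector behaviour described above (create the nested collector the first time a bucket key is seen, reuse it afterwards, and keep buckets sorted by key for the final traversal) is essentially computeIfAbsent over a TreeMap. A minimal illustration, with a plain list standing in for the nested Collector:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

class BucketCollectorSketch {
  // TreeMap keeps bucket keys ordered so the final traversal is in order.
  private final Map<String, List<Integer>> buckets = new TreeMap<>();

  void collect(String bucketKey, int value) {
    // New bucket on first sight, otherwise delegate to the existing one.
    buckets.computeIfAbsent(bucketKey, key -> new ArrayList<>()).add(value);
  }

  public static void main(String[] args) {
    BucketCollectorSketch collector = new BucketCollectorSketch();
    collector.collect("b", 1);
    collector.collect("a", 2);
    collector.collect("b", 3);
    System.out.println(collector.buckets); // {a=[2], b=[1, 3]}
  }
}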
*/ + /** + * {@link Collector} tree builder. + */ @UtilityClass class Builder { - /** build {@link Collector}. */ - public static Collector build( - List buckets, List aggregators) { + /** + * build {@link Collector}. + */ + public static Collector build(List buckets, + List aggregators) { if (buckets.isEmpty()) { return new MetricCollector(aggregators); } else { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java index 2cfa3c9457..c804c7bc9b 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java @@ -22,7 +22,9 @@ */ public class MetricCollector implements Collector { - /** List of {@link NamedAggregator}. */ + /** + * List of {@link NamedAggregator}. + */ private final List> aggregators; /** diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java index 81a1a0230f..782c931046 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java @@ -34,11 +34,15 @@ import org.opensearch.sql.expression.span.SpanExpression; import org.opensearch.sql.utils.DateTimeUtils; -/** Rounding. */ +/** + * Rounding. + */ @EqualsAndHashCode public abstract class Rounding { - /** Create Rounding instance. */ + /** + * Create Rounding instance. + */ public static Rounding createRounding(SpanExpression span) { ExprValue interval = span.getValue().valueOf(); ExprType type = span.type(); @@ -66,6 +70,7 @@ public static Rounding createRounding(SpanExpression span) { public abstract ExprValue round(ExprValue value); + static class TimestampRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -77,13 +82,13 @@ public TimestampRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { - Instant instant = - Instant.ofEpochMilli( - dateTimeUnit.round(var.timestampValue().toEpochMilli(), interval.integerValue())); + Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.timestampValue() + .toEpochMilli(), interval.integerValue())); return new ExprTimestampValue(instant); } } + static class DatetimeRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -95,15 +100,13 @@ public DatetimeRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { - Instant instant = - Instant.ofEpochMilli( - dateTimeUnit.round( - var.datetimeValue().atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), - interval.integerValue())); + Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.datetimeValue() + .atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), interval.integerValue())); return new ExprDatetimeValue(instant.atZone(UTC_ZONE_ID).toLocalDateTime()); } } + static class DateRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -115,11 +118,8 @@ public DateRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { - Instant instant = - Instant.ofEpochMilli( - dateTimeUnit.round( - var.dateValue().atStartOfDay().atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), - interval.integerValue())); 
+ Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.dateValue().atStartOfDay() + .atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), interval.integerValue())); return new ExprDateValue(instant.atZone(UTC_ZONE_ID).toLocalDate()); } } @@ -136,18 +136,17 @@ public TimeRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { if (dateTimeUnit.id > 4) { - throw new ExpressionEvaluationException( - String.format("Unable to set span unit %s for TIME type", dateTimeUnit.getName())); + throw new ExpressionEvaluationException(String + .format("Unable to set span unit %s for TIME type", dateTimeUnit.getName())); } - Instant instant = - Instant.ofEpochMilli( - dateTimeUnit.round( - var.timeValue().getLong(ChronoField.MILLI_OF_DAY), interval.integerValue())); + Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.timeValue().getLong( + ChronoField.MILLI_OF_DAY), interval.integerValue())); return new ExprTimeValue(instant.atZone(UTC_ZONE_ID).toLocalTime()); } } + static class LongRounding extends Rounding { private final Long longInterval; @@ -162,6 +161,7 @@ public ExprValue round(ExprValue value) { } } + static class DoubleRounding extends Rounding { private final Double doubleInterval; @@ -171,12 +171,13 @@ protected DoubleRounding(ExprValue interval) { @Override public ExprValue round(ExprValue value) { - double rounded = - Double.valueOf(value.doubleValue() / doubleInterval).intValue() * doubleInterval; + double rounded = Double + .valueOf(value.doubleValue() / doubleInterval).intValue() * doubleInterval; return ExprValueUtils.doubleValue(rounded); } } + @RequiredArgsConstructor static class UnknownRounding extends Rounding { @Override @@ -185,37 +186,43 @@ public ExprValue round(ExprValue var) { } } + @RequiredArgsConstructor public enum DateTimeUnit { - MILLISECOND(1, "ms", true, ChronoField.MILLI_OF_SECOND.getBaseUnit().getDuration().toMillis()) { + MILLISECOND(1, "ms", true, ChronoField.MILLI_OF_SECOND + .getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - SECOND(2, "s", true, ChronoField.SECOND_OF_MINUTE.getBaseUnit().getDuration().toMillis()) { + SECOND(2, "s", true, ChronoField.SECOND_OF_MINUTE + .getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - MINUTE(3, "m", true, ChronoField.MINUTE_OF_HOUR.getBaseUnit().getDuration().toMillis()) { + MINUTE(3, "m", true, ChronoField.MINUTE_OF_HOUR + .getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - HOUR(4, "h", true, ChronoField.HOUR_OF_DAY.getBaseUnit().getDuration().toMillis()) { + HOUR(4, "h", true, ChronoField.HOUR_OF_DAY + .getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - DAY(5, "d", true, ChronoField.DAY_OF_MONTH.getBaseUnit().getDuration().toMillis()) { + DAY(5, "d", true, ChronoField.DAY_OF_MONTH + .getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); @@ -250,14 +257,18 @@ long round(long utcMillis, int interval) { } }; - @Getter private final int id; - @Getter private final String name; + @Getter + private final int id; + @Getter + private 
final String name; protected final boolean isMillisBased; protected final long ratio; abstract long round(long utcMillis, int interval); - /** Resolve the date time unit. */ + /** + * Resolve the date time unit. + */ public static Rounding.DateTimeUnit resolve(String name) { switch (name) { case "M": @@ -272,4 +283,5 @@ public static Rounding.DateTimeUnit resolve(String name) { } } } + } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java index 5542d0f0e4..105ad5ed32 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java @@ -19,9 +19,12 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.storage.Table; + /** - * Table implementation to handle show datasources command. Since datasource information is not tied - * to any storage engine, this info is handled via DataSource Table. + * Table implementation to handle show datasources command. + * Since datasource information is not tied to any storage engine, this info + * is handled via DataSource Table. + * */ @RequiredArgsConstructor @EqualsAndHashCode @@ -41,7 +44,8 @@ public PhysicalPlan implement(LogicalPlan plan) { @VisibleForTesting @RequiredArgsConstructor - public static class DataSourceTableDefaultImplementor extends DefaultImplementor { + public static class DataSourceTableDefaultImplementor + extends DefaultImplementor { private final DataSourceService dataSourceService; @@ -50,4 +54,5 @@ public PhysicalPlan visitRelation(LogicalRelation node, Object context) { return new DataSourceTableScan(dataSourceService); } } + } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java index bc92df7d16..93e65054b5 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java @@ -22,9 +22,11 @@ import org.opensearch.sql.storage.TableScanOperator; /** - * This class handles table scan of data source table. Right now these are derived from - * dataSourceService thorough static fields. In future this might scan data from underlying - * datastore if we start persisting datasource info somewhere. + * This class handles table scan of data source table. + * Right now these are derived from dataSourceService thorough static fields. + * In future this might scan data from underlying datastore if we start + * persisting datasource info somewhere. 
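For the span Rounding classes and the DateTimeUnit enum earlier in this diff: each millisecond-based variant floors the value down to a multiple of the configured interval before bucketing. A self-contained sketch of that floor rounding for non-negative timestamps (the helper here is illustrative, not the project's DateTimeUtils):

import java.time.Instant;

class SpanRoundingSketch {
  // Floor epoch milliseconds down to the nearest multiple of the bucket width.
  static long roundFloor(long utcMillis, long bucketMillis) {
    return utcMillis - utcMillis % bucketMillis;
  }

  public static void main(String[] args) {
    long fifteenMinutes = 15L * 60 * 1000;
    long t = Instant.parse("2023-01-01T10:07:30Z").toEpochMilli();
    System.out.println(Instant.ofEpochMilli(roundFloor(t, fifteenMinutes))); // 2023-01-01T10:00:00Z
  }
}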
+ * */ public class DataSourceTableScan extends TableScanOperator { @@ -45,16 +47,15 @@ public String explain() { @Override public void open() { List exprValues = new ArrayList<>(); - Set dataSourceMetadataSet = dataSourceService.getDataSourceMetadata(true); + Set dataSourceMetadataSet + = dataSourceService.getDataSourceMetadata(true); for (DataSourceMetadata dataSourceMetadata : dataSourceMetadataSet) { exprValues.add( - new ExprTupleValue( - new LinkedHashMap<>( - ImmutableMap.of( - "DATASOURCE_NAME", - ExprValueUtils.stringValue(dataSourceMetadata.getName()), - "CONNECTOR_TYPE", - ExprValueUtils.stringValue(dataSourceMetadata.getConnector().name()))))); + new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( + "DATASOURCE_NAME", + ExprValueUtils.stringValue(dataSourceMetadata.getName()), + "CONNECTOR_TYPE", + ExprValueUtils.stringValue(dataSourceMetadata.getConnector().name()))))); } iterator = exprValues.iterator(); } @@ -68,4 +69,5 @@ public boolean hasNext() { public ExprValue next() { return iterator.next(); } + } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java index 469305a15d..dd959d9b56 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java @@ -13,16 +13,19 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.type.ExprType; -/** Definition of the data source table schema. */ +/** + * Definition of the data source table schema. + */ @Getter @RequiredArgsConstructor public enum DataSourceTableSchema { - DATASOURCE_TABLE_SCHEMA( - new LinkedHashMap<>() { - { - put("DATASOURCE_NAME", STRING); - put("CONNECTOR_TYPE", STRING); - } - }); + + DATASOURCE_TABLE_SCHEMA(new LinkedHashMap<>() { + { + put("DATASOURCE_NAME", STRING); + put("CONNECTOR_TYPE", STRING); + } + } + ); private final Map mapping; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java b/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java index 87ff048531..18eb10f19d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java @@ -8,8 +8,8 @@ import lombok.Data; /** - * Stream context required by stream processing components and can be stored and restored between - * executions. + * Stream context required by stream processing components and can be + * stored and restored between executions. */ @Data public class StreamContext { diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java index 49a91dd9cc..63d6a5b163 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java @@ -7,7 +7,9 @@ import lombok.RequiredArgsConstructor; -/** Watermark generator that generates watermark with bounded out-of-order delay. */ +/** + * Watermark generator that generates watermark with bounded out-of-order delay. 
+ */ @RequiredArgsConstructor public class BoundedOutOfOrderWatermarkGenerator implements WatermarkGenerator { diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java index e4a44e5169..4f4c9a8a00 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java @@ -6,8 +6,8 @@ package org.opensearch.sql.planner.streaming.watermark; /** - * A watermark generator generates watermark timestamp based on some strategy which is defined in - * implementation class. + * A watermark generator generates watermark timestamp based on some strategy which is defined + * in implementation class. */ public interface WatermarkGenerator { @@ -18,4 +18,5 @@ public interface WatermarkGenerator { * @return watermark timestamp in millisecond */ long generate(long timestamp); + } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java index 3d5b180346..2a85ea391c 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java @@ -7,7 +7,9 @@ import lombok.Data; -/** A time window is a window of time interval with inclusive start time and exclusive end time. */ +/** + * A time window is a window of time interval with inclusive start time and exclusive end time. + */ @Data public class Window { @@ -17,7 +19,9 @@ public class Window { /** End timestamp (exclusive) of the time window. */ private final long endTime; - /** Return the maximum timestamp (inclusive) of the window. */ + /** + * Return the maximum timestamp (inclusive) of the window. + */ public long maxTimestamp() { return endTime - 1; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java index 1b1f12a573..f0f47fd575 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java @@ -12,8 +12,8 @@ import org.opensearch.sql.utils.DateTimeUtils; /** - * A sliding window assigner assigns multiple overlapped window per event timestamp. The overlap - * size is determined by the given slide interval. + * A sliding window assigner assigns multiple overlapped window per event timestamp. + * The overlap size is determined by the given slide interval. */ public class SlidingWindowAssigner implements WindowAssigner { @@ -27,13 +27,13 @@ public class SlidingWindowAssigner implements WindowAssigner { * Create sliding window assigner with the given window and slide size in millisecond. 
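The WatermarkGenerator contract above is a single long-in, long-out method, so a minimal sketch (not part of this patch) can be as small as tracking the maximum event time seen; the class name is illustrative.

import org.opensearch.sql.planner.streaming.watermark.WatermarkGenerator;

// Naive generator: the watermark is simply the largest event timestamp seen,
// i.e. zero tolerance for out-of-order events, in contrast with the bounded
// out-of-order generator above.
public class MaxTimestampWatermarkGenerator implements WatermarkGenerator {

  private long maxTimestamp = Long.MIN_VALUE;

  @Override
  public long generate(long timestamp) {
    maxTimestamp = Math.max(maxTimestamp, timestamp);
    return maxTimestamp;
  }
}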
* * @param windowSize window size in millisecond - * @param slideSize slide size in millisecond + * @param slideSize slide size in millisecond */ public SlidingWindowAssigner(long windowSize, long slideSize) { - Preconditions.checkArgument( - windowSize > 0, "Window size [%s] must be positive number", windowSize); - Preconditions.checkArgument( - slideSize > 0, "Slide size [%s] must be positive number", slideSize); + Preconditions.checkArgument(windowSize > 0, + "Window size [%s] must be positive number", windowSize); + Preconditions.checkArgument(slideSize > 0, + "Slide size [%s] must be positive number", slideSize); this.windowSize = windowSize; this.slideSize = slideSize; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java index 2591689a35..192bb6c429 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java @@ -11,7 +11,9 @@ import org.opensearch.sql.planner.streaming.windowing.Window; import org.opensearch.sql.utils.DateTimeUtils; -/** A tumbling window assigner assigns a single window per event timestamp without overlap. */ +/** + * A tumbling window assigner assigns a single window per event timestamp without overlap. + */ public class TumblingWindowAssigner implements WindowAssigner { /** Window size in millisecond. */ @@ -23,8 +25,8 @@ public class TumblingWindowAssigner implements WindowAssigner { * @param windowSize window size in millisecond */ public TumblingWindowAssigner(long windowSize) { - Preconditions.checkArgument( - windowSize > 0, "Window size [%s] must be positive number", windowSize); + Preconditions.checkArgument(windowSize > 0, + "Window size [%s] must be positive number", windowSize); this.windowSize = windowSize; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java index fd615c2d5e..dac882c5ff 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java @@ -9,16 +9,16 @@ import org.opensearch.sql.planner.streaming.windowing.Window; /** - * A window assigner assigns zero or more window to an event timestamp based on different windowing - * approach. + * A window assigner assigns zero or more window to an event timestamp + * based on different windowing approach. */ public interface WindowAssigner { /** * Return window(s) assigned to the timestamp. 
- * * @param timestamp given event timestamp * @return windows assigned */ List assign(long timestamp); + } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java index f614ce847e..1801880961 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java @@ -11,8 +11,8 @@ /** * After watermark window trigger fires window state output once a window is below watermark. - * Precisely speaking, after watermark means the window boundary (max timestamp) is equal to or less - * than the current watermark timestamp. + * Precisely speaking, after watermark means the window boundary (max timestamp) is equal to + * or less than the current watermark timestamp. */ @RequiredArgsConstructor public class AfterWatermarkWindowTrigger implements WindowTrigger { diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java index 30dba22725..465f0aa9eb 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java @@ -8,7 +8,9 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -/** Result determined by a trigger for what should happen to the window. */ +/** + * Result determined by a trigger for what should happen to the window. + */ @Getter @RequiredArgsConstructor public enum TriggerResult { diff --git a/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java b/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java index 69d902c1d7..8512eddbe3 100644 --- a/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java +++ b/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java @@ -19,9 +19,14 @@ * {@link DataSourceFactory}. */ public interface DataSourceFactory { - /** Get {@link DataSourceType}. */ + /** + * Get {@link DataSourceType}. + */ DataSourceType getDataSourceType(); - /** Create {@link DataSource}. */ + /** + * Create {@link DataSource}. + */ DataSource createDataSource(DataSourceMetadata metadata); + } diff --git a/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java b/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java index c3b54beaaa..ffcc0911de 100644 --- a/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java +++ b/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.storage; import java.util.Collection; @@ -10,10 +11,14 @@ import org.opensearch.sql.DataSourceSchemaName; import org.opensearch.sql.expression.function.FunctionResolver; -/** Storage engine for different storage to provide data access API implementation. */ +/** + * Storage engine for different storage to provide data access API implementation. + */ public interface StorageEngine { - /** Get {@link Table} from storage engine. */ + /** + * Get {@link Table} from storage engine. 
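A short usage sketch of the assigner API above; the window and slide sizes and the event timestamp are arbitrary sample values, and the element type of the returned list is the Window class shown earlier.

import java.util.List;
import org.opensearch.sql.planner.streaming.windowing.Window;
import org.opensearch.sql.planner.streaming.windowing.assigner.SlidingWindowAssigner;
import org.opensearch.sql.planner.streaming.windowing.assigner.TumblingWindowAssigner;
import org.opensearch.sql.planner.streaming.windowing.assigner.WindowAssigner;

class WindowAssignerExample {
  static void demo() {
    long eventTime = 1_700_000_030_000L;

    // Tumbling: exactly one non-overlapping 60s window per event timestamp.
    WindowAssigner tumbling = new TumblingWindowAssigner(60_000L);
    List<Window> one = tumbling.assign(eventTime);

    // Sliding: 60s windows starting every 10s, so several windows per event.
    WindowAssigner sliding = new SlidingWindowAssigner(60_000L, 10_000L);
    List<Window> several = sliding.assign(eventTime);
  }
}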
+ */ Table getTable(DataSourceSchemaName dataSourceSchemaName, String tableName); /** diff --git a/core/src/main/java/org/opensearch/sql/storage/Table.java b/core/src/main/java/org/opensearch/sql/storage/Table.java index 33dbd7d66d..fc1def5a2e 100644 --- a/core/src/main/java/org/opensearch/sql/storage/Table.java +++ b/core/src/main/java/org/opensearch/sql/storage/Table.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.storage; import java.util.Map; @@ -14,7 +15,9 @@ import org.opensearch.sql.storage.read.TableScanBuilder; import org.opensearch.sql.storage.write.TableWriteBuilder; -/** Table. */ +/** + * Table. + */ public interface Table { /** @@ -35,10 +38,14 @@ default void create(Map schema) { throw new UnsupportedOperationException("Unsupported Operation"); } - /** Get the {@link ExprType} for each field in the table. */ + /** + * Get the {@link ExprType} for each field in the table. + */ Map getFieldTypes(); - /** Get the {@link ExprType} for each meta-field (reserved fields) in the table. */ + /** + * Get the {@link ExprType} for each meta-field (reserved fields) in the table. + */ default Map getReservedFieldTypes() { return Map.of(); } @@ -54,8 +61,8 @@ default Map getReservedFieldTypes() { PhysicalPlan implement(LogicalPlan plan); /** - * Optimize the {@link LogicalPlan} by storage engine rule. The default optimize solution is no - * optimization. + * Optimize the {@link LogicalPlan} by storage engine rule. + * The default optimize solution is no optimization. * * @param plan logical plan. * @return logical plan. @@ -82,11 +89,15 @@ default TableScanBuilder createScanBuilder() { * @return table write builder */ default TableWriteBuilder createWriteBuilder(LogicalWrite plan) { - throw new UnsupportedOperationException("Write operation is not supported on current table"); + throw new UnsupportedOperationException( + "Write operation is not supported on current table"); } - /** Translate {@link Table} to {@link StreamingSource} if possible. */ + /** + * Translate {@link Table} to {@link StreamingSource} if possible. + */ default StreamingSource asStreamingSource() { throw new UnsupportedOperationException(); } + } diff --git a/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java b/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java index 130516b3ef..1b8e33bc4f 100644 --- a/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java +++ b/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.storage; import java.util.Collections; @@ -11,8 +12,8 @@ import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; /** - * Abstract table scan class for different storage to implement. This is also to avoid "polluting" - * physical plan visitor by concrete table scan implementation. + * Abstract table scan class for different storage to implement. + * This is also to avoid "polluting" physical plan visitor by concrete table scan implementation. 
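To make the Table contract above concrete, a hedged sketch of a minimal implementation that supplies only the two required members and inherits every default (create, optimize, createScanBuilder, createWriteBuilder, asStreamingSource); the class and its single field are made up.

import java.util.Map;
import org.opensearch.sql.data.type.ExprCoreType;
import org.opensearch.sql.data.type.ExprType;
import org.opensearch.sql.planner.logical.LogicalPlan;
import org.opensearch.sql.planner.physical.PhysicalPlan;
import org.opensearch.sql.storage.Table;

// Hypothetical single-column table relying entirely on the interface defaults.
public class SingleFieldTable implements Table {

  @Override
  public Map<String, ExprType> getFieldTypes() {
    return Map.of("message", ExprCoreType.STRING);
  }

  @Override
  public PhysicalPlan implement(LogicalPlan plan) {
    // A real storage would hand the plan to its own implementor here, in the
    // way DataSourceTable does with DataSourceTableDefaultImplementor above.
    throw new UnsupportedOperationException("sketch only");
  }
}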
*/ public abstract class TableScanOperator extends PhysicalPlan { diff --git a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java index 2487c651ad..51a0348116 100644 --- a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java +++ b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.storage.bindingtuple; import org.opensearch.sql.data.model.ExprMissingValue; @@ -13,19 +14,20 @@ import org.opensearch.sql.expression.env.Environment; /** - * BindingTuple represents the a relationship between bindingName and ExprValue. e.g. The operation - * output column name is bindingName, the value is the ExprValue. + * BindingTuple represents the a relationship between bindingName and ExprValue. + * e.g. The operation output column name is bindingName, the value is the ExprValue. */ public abstract class BindingTuple implements Environment { - public static BindingTuple EMPTY = - new BindingTuple() { - @Override - public ExprValue resolve(ReferenceExpression ref) { - return ExprMissingValue.of(); - } - }; + public static BindingTuple EMPTY = new BindingTuple() { + @Override + public ExprValue resolve(ReferenceExpression ref) { + return ExprMissingValue.of(); + } + }; - /** Resolve {@link Expression} in the BindingTuple environment. */ + /** + * Resolve {@link Expression} in the BindingTuple environment. + */ @Override public ExprValue resolve(Expression var) { if (var instanceof ReferenceExpression) { @@ -35,6 +37,8 @@ public ExprValue resolve(Expression var) { } } - /** Resolve the {@link ReferenceExpression} in BindingTuple context. */ + /** + * Resolve the {@link ReferenceExpression} in BindingTuple context. + */ public abstract ExprValue resolve(ReferenceExpression ref); } diff --git a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java index d43a3f2a1b..4589731442 100644 --- a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java +++ b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.storage.bindingtuple; import java.util.function.Supplier; @@ -11,7 +12,9 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.expression.ReferenceExpression; -/** Lazy Implementation of {@link BindingTuple}. */ +/** + * Lazy Implementation of {@link BindingTuple}. + */ @RequiredArgsConstructor public class LazyBindingTuple extends BindingTuple { private final Supplier lazyBinding; diff --git a/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java b/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java index b2da0b67a4..f0158c52b8 100644 --- a/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java +++ b/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java @@ -19,14 +19,16 @@ import org.opensearch.sql.storage.TableScanOperator; /** - * A TableScanBuilder represents transition state between logical planning and physical planning for - * table scan operator. 
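Mirroring the BindingTuple.EMPTY anonymous class above, a sketch of a one-column tuple that could back a unit test; note that ref.getAttr() is an assumed accessor, since the attribute getter on ReferenceExpression is not part of this diff.

import org.opensearch.sql.data.model.ExprMissingValue;
import org.opensearch.sql.data.model.ExprValue;
import org.opensearch.sql.data.model.ExprValueUtils;
import org.opensearch.sql.expression.ReferenceExpression;
import org.opensearch.sql.storage.bindingtuple.BindingTuple;

class BindingTupleExample {
  // Resolves the single column "name"; everything else is missing, just like EMPTY.
  static final BindingTuple ONE_COLUMN = new BindingTuple() {
    @Override
    public ExprValue resolve(ReferenceExpression ref) {
      return "name".equals(ref.getAttr())   // getAttr() is an assumed accessor
          ? ExprValueUtils.stringValue("alice")
          : ExprMissingValue.of();
    }
  };
}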
The concrete implementation class gets involved in the logical optimization - * through this abstraction and thus get the chance to handle push down optimization without - * intruding core engine. + * A TableScanBuilder represents transition state between logical planning and physical planning + * for table scan operator. The concrete implementation class gets involved in the logical + * optimization through this abstraction and thus get the chance to handle push down optimization + * without intruding core engine. */ public abstract class TableScanBuilder extends LogicalPlan { - /** Construct and initialize children to empty list. */ + /** + * Construct and initialize children to empty list. + */ protected TableScanBuilder() { super(Collections.emptyList()); } @@ -39,8 +41,8 @@ protected TableScanBuilder() { public abstract TableScanOperator build(); /** - * Can a given filter operator be pushed down to table scan builder. Assume no such support by - * default unless subclass override this. + * Can a given filter operator be pushed down to table scan builder. Assume no such support + * by default unless subclass override this. * * @param filter logical filter operator * @return true if pushed down, otherwise false @@ -50,8 +52,8 @@ public boolean pushDownFilter(LogicalFilter filter) { } /** - * Can a given aggregate operator be pushed down to table scan builder. Assume no such support by - * default unless subclass override this. + * Can a given aggregate operator be pushed down to table scan builder. Assume no such support + * by default unless subclass override this. * * @param aggregation logical aggregate operator * @return true if pushed down, otherwise false @@ -61,8 +63,8 @@ public boolean pushDownAggregation(LogicalAggregation aggregation) { } /** - * Can a given sort operator be pushed down to table scan builder. Assume no such support by - * default unless subclass override this. + * Can a given sort operator be pushed down to table scan builder. Assume no such support + * by default unless subclass override this. * * @param sort logical sort operator * @return true if pushed down, otherwise false @@ -72,8 +74,8 @@ public boolean pushDownSort(LogicalSort sort) { } /** - * Can a given limit operator be pushed down to table scan builder. Assume no such support by - * default unless subclass override this. + * Can a given limit operator be pushed down to table scan builder. Assume no such support + * by default unless subclass override this. * * @param limit logical limit operator * @return true if pushed down, otherwise false @@ -83,8 +85,8 @@ public boolean pushDownLimit(LogicalLimit limit) { } /** - * Can a given project operator be pushed down to table scan builder. Assume no such support by - * default unless subclass override this. + * Can a given project operator be pushed down to table scan builder. Assume no such support + * by default unless subclass override this. * * @param project logical project operator * @return true if pushed down, otherwise false @@ -94,8 +96,8 @@ public boolean pushDownProject(LogicalProject project) { } /** - * Can a given highlight operator be pushed down to table scan builder. Assume no such support by - * default unless subclass override this. + * Can a given highlight operator be pushed down to table scan builder. Assume no such support + * by default unless subclass override this. 
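A sketch of how a concrete scan builder opts into one of the push-down hooks above while keeping the "no support" defaults for the rest; the class name, its captured field, and the unimplemented build() body are illustrative.

import org.opensearch.sql.planner.logical.LogicalFilter;
import org.opensearch.sql.storage.TableScanOperator;
import org.opensearch.sql.storage.read.TableScanBuilder;

public class FilterAwareScanBuilder extends TableScanBuilder {

  private LogicalFilter pushedFilter;   // remembered until build() runs

  @Override
  public boolean pushDownFilter(LogicalFilter filter) {
    this.pushedFilter = filter;         // accept the filter into the scan
    return true;                        // report it as consumed to the optimizer
  }

  @Override
  public TableScanOperator build() {
    // A real builder would translate pushedFilter into the storage query here.
    throw new UnsupportedOperationException("sketch only");
  }
}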
* * @param highlight logical highlight operator * @return true if pushed down, otherwise false @@ -105,8 +107,8 @@ public boolean pushDownHighlight(LogicalHighlight highlight) { } /** - * Can a given nested operator be pushed down to table scan builder. Assume no such support by - * default unless subclass override this. + * Can a given nested operator be pushed down to table scan builder. Assume no such support + * by default unless subclass override this. * * @param nested logical nested operator * @return true if pushed down, otherwise false diff --git a/core/src/main/java/org/opensearch/sql/storage/split/Split.java b/core/src/main/java/org/opensearch/sql/storage/split/Split.java index 1cb0ca57ce..e9e0c6fcc1 100644 --- a/core/src/main/java/org/opensearch/sql/storage/split/Split.java +++ b/core/src/main/java/org/opensearch/sql/storage/split/Split.java @@ -8,14 +8,13 @@ import org.opensearch.sql.storage.StorageEngine; /** - * Split is a sections of a data set. Each {@link StorageEngine} should have specific implementation - * of Split. + * Split is a sections of a data set. Each {@link StorageEngine} should have specific + * implementation of Split. */ public interface Split { /** * Get the split id. - * * @return split id. */ String getSplitId(); diff --git a/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java b/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java index af18916f71..54dfa5d557 100644 --- a/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java +++ b/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java @@ -18,7 +18,9 @@ */ public abstract class TableWriteBuilder extends LogicalPlan { - /** Construct table write builder with child node. */ + /** + * Construct table write builder with child node. + */ public TableWriteBuilder(LogicalPlan child) { super(Collections.singletonList(child)); } @@ -26,7 +28,7 @@ public TableWriteBuilder(LogicalPlan child) { /** * Build table write operator with given child node. * - * @param child child operator node + * @param child child operator node * @return table write operator */ public abstract TableWriteOperator build(PhysicalPlan child); diff --git a/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java b/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java index 18e6541514..39726bc975 100644 --- a/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java +++ b/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.utils; import static java.time.temporal.ChronoField.DAY_OF_MONTH; @@ -21,23 +22,26 @@ import java.util.Locale; import lombok.experimental.UtilityClass; -/** DateTimeFormatter. Reference org.opensearch.common.time.DateFormatters. */ +/** + * DateTimeFormatter. + * Reference org.opensearch.common.time.DateFormatters. + */ @UtilityClass public class DateTimeFormatters { - // Length of a date formatted as YYYYMMDD. + //Length of a date formatted as YYYYMMDD. public static final int FULL_DATE_LENGTH = 8; - // Length of a date formatted as YYMMDD. + //Length of a date formatted as YYMMDD. public static final int SHORT_DATE_LENGTH = 6; - // Length of a date formatted as YMMDD. + //Length of a date formatted as YMMDD. public static final int SINGLE_DIGIT_YEAR_DATE_LENGTH = 5; - // Length of a date formatted as MMDD. + //Length of a date formatted as MMDD. 
public static final int NO_YEAR_DATE_LENGTH = 4; - // Length of a date formatted as MDD. + //Length of a date formatted as MDD. public static final int SINGLE_DIGIT_MONTH_DATE_LENGTH = 3; private static final int MIN_FRACTION_SECONDS = 0; @@ -106,8 +110,8 @@ public class DateTimeFormatters { .toFormatter(Locale.ROOT) .withResolverStyle(ResolverStyle.STRICT); - public static final DateTimeFormatter SQL_LITERAL_DATE_TIME_FORMAT = - DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); + public static final DateTimeFormatter SQL_LITERAL_DATE_TIME_FORMAT = DateTimeFormatter + .ofPattern("yyyy-MM-dd HH:mm:ss"); public static final DateTimeFormatter DATE_TIME_FORMATTER = new DateTimeFormatterBuilder() @@ -116,7 +120,9 @@ public class DateTimeFormatters { .appendOptional(STRICT_HOUR_MINUTE_SECOND_FORMATTER) .toFormatter(); - /** todo. only support timestamp in format yyyy-MM-dd HH:mm:ss. */ + /** + * todo. only support timestamp in format yyyy-MM-dd HH:mm:ss. + */ public static final DateTimeFormatter DATE_TIME_FORMATTER_WITHOUT_NANO = SQL_LITERAL_DATE_TIME_FORMAT; @@ -124,7 +130,10 @@ public class DateTimeFormatters { new DateTimeFormatterBuilder() .appendPattern("uuuu-MM-dd HH:mm:ss") .appendFraction( - ChronoField.NANO_OF_SECOND, MIN_FRACTION_SECONDS, MAX_FRACTION_SECONDS, true) + ChronoField.NANO_OF_SECOND, + MIN_FRACTION_SECONDS, + MAX_FRACTION_SECONDS, + true) .toFormatter(Locale.ROOT) .withResolverStyle(ResolverStyle.STRICT); @@ -132,7 +141,10 @@ public class DateTimeFormatters { new DateTimeFormatterBuilder() .appendPattern("[uuuu-MM-dd HH:mm:ss][uuuu-MM-dd HH:mm][HH:mm:ss][HH:mm][uuuu-MM-dd]") .appendFraction( - ChronoField.NANO_OF_SECOND, MIN_FRACTION_SECONDS, MAX_FRACTION_SECONDS, true) + ChronoField.NANO_OF_SECOND, + MIN_FRACTION_SECONDS, + MAX_FRACTION_SECONDS, + true) .toFormatter(Locale.ROOT) .withResolverStyle(ResolverStyle.STRICT); @@ -187,7 +199,7 @@ public class DateTimeFormatters { // YYYYMMDDhhmmss public static final DateTimeFormatter DATE_TIME_FORMATTER_LONG_YEAR = new DateTimeFormatterBuilder() - .appendValue(YEAR, 4) + .appendValue(YEAR,4) .appendPattern("MMddHHmmss") .toFormatter() .withResolverStyle(ResolverStyle.STRICT); @@ -202,8 +214,11 @@ public class DateTimeFormatters { // uuuu-MM-dd HH:mm:ss[xxx] public static final DateTimeFormatter DATE_TIME_FORMATTER_WITH_TZ = new DateTimeFormatterBuilder() - .appendPattern("uuuu-MM-dd HH:mm:ss[xxx]") - .appendFraction( - ChronoField.NANO_OF_SECOND, MIN_FRACTION_SECONDS, MAX_FRACTION_SECONDS, true) - .toFormatter(); + .appendPattern("uuuu-MM-dd HH:mm:ss[xxx]") + .appendFraction( + ChronoField.NANO_OF_SECOND, + MIN_FRACTION_SECONDS, + MAX_FRACTION_SECONDS, + true) + .toFormatter(); } diff --git a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java index 593b4c4471..74fdf42571 100644 --- a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java @@ -21,9 +21,9 @@ public class DateTimeUtils { /** * Util method to round the date/time with given unit. 
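A small usage sketch of the SQL literal formatter above; the timestamp string is an arbitrary example.

import java.time.LocalDateTime;
import org.opensearch.sql.utils.DateTimeFormatters;

class FormatterExample {
  static LocalDateTime parseSqlLiteral() {
    // Parses the "yyyy-MM-dd HH:mm:ss" shape accepted by SQL_LITERAL_DATE_TIME_FORMAT.
    return LocalDateTime.parse("2023-05-01 12:30:45",
        DateTimeFormatters.SQL_LITERAL_DATE_TIME_FORMAT);
  }
}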
* - * @param utcMillis Date/time value to round, given in utc millis - * @param unitMillis Date/time interval unit in utc millis - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param unitMillis Date/time interval unit in utc millis + * @return Rounded date/time value in utc millis */ public static long roundFloor(long utcMillis, long unitMillis) { return utcMillis - utcMillis % unitMillis; @@ -32,9 +32,9 @@ public static long roundFloor(long utcMillis, long unitMillis) { /** * Util method to round the date/time in week(s). * - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of weeks as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of weeks as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundWeek(long utcMillis, int interval) { return roundFloor(utcMillis + 259200000L, 604800000L * interval) - 259200000L; @@ -43,18 +43,16 @@ public static long roundWeek(long utcMillis, int interval) { /** * Util method to round the date/time in month(s). * - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of months as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of months as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundMonth(long utcMillis, int interval) { ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); - ZonedDateTime zonedDateTime = - Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID).plusMonths(interval); - long monthDiff = - (zonedDateTime.getYear() - initDateTime.getYear()) * 12L - + zonedDateTime.getMonthValue() - - initDateTime.getMonthValue(); + ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID) + .plusMonths(interval); + long monthDiff = (zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime + .getMonthValue() - initDateTime.getMonthValue(); long monthToAdd = (monthDiff / interval - 1) * interval; return initDateTime.plusMonths(monthToAdd).toInstant().toEpochMilli(); } @@ -62,18 +60,16 @@ public static long roundMonth(long utcMillis, int interval) { /** * Util method to round the date/time in quarter(s). 
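A worked example of roundFloor above: flooring 7,500,000 ms (2h 5m) to a 3,600,000 ms (one hour) unit drops the 300,000 ms remainder.

import org.opensearch.sql.utils.DateTimeUtils;

class RoundFloorExample {
  static void demo() {
    // 7_500_000 % 3_600_000 = 300_000, so the floored value is 7_200_000 (exactly 2h).
    long floored = DateTimeUtils.roundFloor(7_500_000L, 3_600_000L);
    assert floored == 7_200_000L;
  }
}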
* - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of quarters as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of quarters as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundQuarter(long utcMillis, int interval) { ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); - ZonedDateTime zonedDateTime = - Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID).plusMonths(interval * 3L); - long monthDiff = - ((zonedDateTime.getYear() - initDateTime.getYear()) * 12L - + zonedDateTime.getMonthValue() - - initDateTime.getMonthValue()); + ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID) + .plusMonths(interval * 3L); + long monthDiff = ((zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime + .getMonthValue() - initDateTime.getMonthValue()); long monthToAdd = (monthDiff / (interval * 3L) - 1) * interval * 3; return initDateTime.plusMonths(monthToAdd).toInstant().toEpochMilli(); } @@ -81,9 +77,9 @@ public static long roundQuarter(long utcMillis, int interval) { /** * Util method to round the date/time in year(s). * - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of years as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of years as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundYear(long utcMillis, int interval) { ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); @@ -128,25 +124,29 @@ public Boolean isValidMySqlTimeZoneId(ZoneId zone) { ZonedDateTime passedTzValidator = defaultDateTime.withZoneSameInstant(zone).withZoneSameLocal(defaultTz); - return (passedTzValidator.isBefore(maxTzValidator) || passedTzValidator.isEqual(maxTzValidator)) - && (passedTzValidator.isAfter(minTzValidator) || passedTzValidator.isEqual(minTzValidator)); + return (passedTzValidator.isBefore(maxTzValidator) + || passedTzValidator.isEqual(maxTzValidator)) + && (passedTzValidator.isAfter(minTzValidator) + || passedTzValidator.isEqual(minTzValidator)); } /** - * Extracts LocalDateTime from a datetime ExprValue. Uses `FunctionProperties` for - * `ExprTimeValue`. + * Extracts LocalDateTime from a datetime ExprValue. + * Uses `FunctionProperties` for `ExprTimeValue`. */ - public static LocalDateTime extractDateTime( - ExprValue value, FunctionProperties functionProperties) { + public static LocalDateTime extractDateTime(ExprValue value, + FunctionProperties functionProperties) { return value instanceof ExprTimeValue ? ((ExprTimeValue) value).datetimeValue(functionProperties) : value.datetimeValue(); } /** - * Extracts LocalDate from a datetime ExprValue. Uses `FunctionProperties` for `ExprTimeValue`. + * Extracts LocalDate from a datetime ExprValue. + * Uses `FunctionProperties` for `ExprTimeValue`. */ - public static LocalDate extractDate(ExprValue value, FunctionProperties functionProperties) { + public static LocalDate extractDate(ExprValue value, + FunctionProperties functionProperties) { return value instanceof ExprTimeValue ? 
((ExprTimeValue) value).dateValue(functionProperties) : value.dateValue(); diff --git a/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java b/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java index f04bf3748f..e8324af5f4 100644 --- a/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.utils; import java.util.List; @@ -10,13 +11,17 @@ import lombok.experimental.UtilityClass; import org.opensearch.sql.expression.Expression; -/** Utils for {@link Expression}. */ +/** + * Utils for {@link Expression}. + */ @UtilityClass public class ExpressionUtils { public static String PATH_SEP = "."; - /** Format the list of {@link Expression}. */ + /** + * Format the list of {@link Expression}. + */ public static String format(List expressionList) { return expressionList.stream().map(Expression::toString).collect(Collectors.joining(",")); } diff --git a/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java b/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java index 0e4bdcf202..f4ece6a190 100644 --- a/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.utils; import java.util.regex.Pattern; @@ -14,9 +15,9 @@ @UtilityClass public class OperatorUtils { /** - * Wildcard pattern matcher util. Percent (%) character for wildcard, Underscore (_) character for - * a single character match. - * + * Wildcard pattern matcher util. + * Percent (%) character for wildcard, + * Underscore (_) character for a single character match. * @param pattern string pattern to match. * @return if text matches pattern returns true; else return false. */ @@ -29,13 +30,12 @@ public static ExprBooleanValue matches(ExprValue text, ExprValue pattern) { /** * Checks if text matches regular expression pattern. - * * @param pattern string pattern to match. * @return if text matches pattern returns true; else return false. */ public static ExprIntegerValue matchesRegexp(ExprValue text, ExprValue pattern) { - return new ExprIntegerValue( - Pattern.compile(pattern.stringValue()).matcher(text.stringValue()).matches() ? 1 : 0); + return new ExprIntegerValue(Pattern.compile(pattern.stringValue()).matcher(text.stringValue()) + .matches() ? 1 : 0); } private static final char DEFAULT_ESCAPE = '\\'; diff --git a/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java b/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java index e659cfdf50..6c640482d0 100644 --- a/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.utils; import com.google.common.collect.ImmutableMap; @@ -17,27 +18,30 @@ import org.opensearch.sql.expression.parse.PatternsExpression; import org.opensearch.sql.expression.parse.RegexExpression; -/** Utils for {@link ParseExpression}. */ +/** + * Utils for {@link ParseExpression}. 
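A usage sketch for the two matchers above; the strings are arbitrary, and ExprValueUtils is only used to wrap plain Java strings into ExprValue.

import org.opensearch.sql.data.model.ExprValue;
import org.opensearch.sql.data.model.ExprValueUtils;
import org.opensearch.sql.utils.OperatorUtils;

class MatcherExample {
  static void demo() {
    ExprValue text = ExprValueUtils.stringValue("opensearch");

    // LIKE-style wildcard: % matches any run of characters, _ exactly one.
    ExprValue like = OperatorUtils.matches(text, ExprValueUtils.stringValue("open%"));

    // Regular-expression match, returned as an integer value (1 or 0).
    ExprValue regexp = OperatorUtils.matchesRegexp(text, ExprValueUtils.stringValue("open.*"));
  }
}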
+ */ @UtilityClass public class ParseUtils { private static final String NEW_FIELD_KEY = "new_field"; - private static final Map FACTORY_MAP = - ImmutableMap.of( - ParseMethod.REGEX, RegexExpression::new, - ParseMethod.GROK, GrokExpression::new, - ParseMethod.PATTERNS, PatternsExpression::new); + private static final Map FACTORY_MAP = ImmutableMap.of( + ParseMethod.REGEX, RegexExpression::new, + ParseMethod.GROK, GrokExpression::new, + ParseMethod.PATTERNS, PatternsExpression::new + ); /** * Construct corresponding ParseExpression by {@link ParseMethod}. * * @param parseMethod method used to parse * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param pattern pattern used for parsing + * @param identifier derived field * @return {@link ParseExpression} */ - public static ParseExpression createParseExpression( - ParseMethod parseMethod, Expression sourceField, Expression pattern, Expression identifier) { + public static ParseExpression createParseExpression(ParseMethod parseMethod, + Expression sourceField, Expression pattern, + Expression identifier) { return FACTORY_MAP.get(parseMethod).initialize(sourceField, pattern, identifier); } @@ -47,23 +51,21 @@ public static ParseExpression createParseExpression( * @param pattern pattern used for parsing * @return list of names of the derived fields */ - public static List getNamedGroupCandidates( - ParseMethod parseMethod, String pattern, Map arguments) { + public static List getNamedGroupCandidates(ParseMethod parseMethod, String pattern, + Map arguments) { switch (parseMethod) { case REGEX: return RegexExpression.getNamedGroupCandidates(pattern); case GROK: return GrokExpression.getNamedGroupCandidates(pattern); default: - return PatternsExpression.getNamedGroupCandidates( - arguments.containsKey(NEW_FIELD_KEY) - ? (String) arguments.get(NEW_FIELD_KEY).getValue() - : null); + return PatternsExpression.getNamedGroupCandidates(arguments.containsKey(NEW_FIELD_KEY) + ? (String) arguments.get(NEW_FIELD_KEY).getValue() : null); } } private interface ParseExpressionFactory { - ParseExpression initialize( - Expression sourceField, Expression expression, Expression identifier); + ParseExpression initialize(Expression sourceField, Expression expression, + Expression identifier); } } diff --git a/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java b/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java index 38d2753f6c..5325ea371a 100644 --- a/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java @@ -3,32 +3,44 @@ * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.utils; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.experimental.UtilityClass; -/** System Index Utils. Todo. Find the better name for this class. */ +/** + * System Index Utils. + * Todo. Find the better name for this class. + */ @UtilityClass public class SystemIndexUtils { public static final String TABLE_NAME_FOR_TABLES_INFO = "tables"; - - /** The suffix of all the system tables. */ + /** + * The suffix of all the system tables. + */ private static final String SYS_TABLES_SUFFIX = "ODFE_SYS_TABLE"; - /** The suffix of all the meta tables. */ + /** + * The suffix of all the meta tables. + */ private static final String SYS_META_SUFFIX = "META_" + SYS_TABLES_SUFFIX; - /** The suffix of all the table mappings. 
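A usage sketch of getNamedGroupCandidates above for the REGEX method; the pattern is a made-up example, the arguments map is only consulted for PATTERNS so an empty map is passed, and the import path of ParseMethod is an assumption since it is not part of this diff.

import java.util.List;
import java.util.Map;
import org.opensearch.sql.ast.expression.ParseMethod;   // assumed location of ParseMethod
import org.opensearch.sql.utils.ParseUtils;

class ParseUtilsExample {
  static List<String> derivedFields() {
    // The named capture groups are expected to become the derived field names.
    return ParseUtils.getNamedGroupCandidates(
        ParseMethod.REGEX, "(?<host>\\w+) (?<status>\\d+)", Map.of());
  }
}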
*/ - private static final String SYS_MAPPINGS_SUFFIX = "MAPPINGS_" + SYS_TABLES_SUFFIX; + /** + * The suffix of all the table mappings. + */ + private static final String SYS_MAPPINGS_SUFFIX = "MAPPINGS_" + SYS_TABLES_SUFFIX; - /** The ALL.META_ODFE_SYS_TABLE contain all the table info. */ + /** + * The ALL.META_ODFE_SYS_TABLE contain all the table info. + */ public static final String TABLE_INFO = "ALL." + SYS_META_SUFFIX; public static final String DATASOURCES_TABLE_NAME = ".DATASOURCES"; + public static Boolean isSystemIndex(String indexName) { return indexName.endsWith(SYS_TABLES_SUFFIX); } @@ -50,7 +62,8 @@ public static String mappingTable(String indexName) { public static SystemTable systemTable(String indexName) { final int lastDot = indexName.lastIndexOf("."); String suffix = indexName.substring(lastDot + 1); - String tableName = indexName.substring(0, lastDot).replace("%", "*"); + String tableName = indexName.substring(0, lastDot) + .replace("%", "*"); if (suffix.equalsIgnoreCase(SYS_META_SUFFIX)) { return new SystemInfoTable(tableName); @@ -61,7 +74,9 @@ public static SystemTable systemTable(String indexName) { } } - /** System Table. */ + /** + * System Table. + */ public interface SystemTable { String getTableName(); @@ -75,7 +90,9 @@ default boolean isMetaInfoTable() { } } - /** System Info Table. */ + /** + * System Info Table. + */ @Getter @RequiredArgsConstructor public static class SystemInfoTable implements SystemTable { @@ -87,7 +104,9 @@ public boolean isSystemInfoTable() { } } - /** System Table. */ + /** + * System Table. + */ @Getter @RequiredArgsConstructor public static class MetaInfoTable implements SystemTable {
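Finally, a short illustration of the naming convention handled above; the behaviour follows directly from the suffix check in isSystemIndex and the dot-splitting in systemTable, and the index strings are just examples.

import org.opensearch.sql.utils.SystemIndexUtils;
import org.opensearch.sql.utils.SystemIndexUtils.SystemTable;

class SystemIndexExample {
  static void demo() {
    // Any index name ending with the ODFE_SYS_TABLE suffix is a system index.
    boolean system = SystemIndexUtils.isSystemIndex("ALL.META_ODFE_SYS_TABLE");   // true

    // TABLE_INFO ("ALL.META_ODFE_SYS_TABLE") parses into a system info table named "ALL".
    SystemTable info = SystemIndexUtils.systemTable(SystemIndexUtils.TABLE_INFO);
    String name = info.getTableName();   // "ALL"
  }
}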