diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java index 11d25c3ce8..4f8a62d898 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import java.util.HashMap; @@ -11,84 +10,85 @@ import java.util.Optional; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Environment for symbol and its attribute (type) in the current scope - */ +/** Environment for symbol and its attribute (type) in the current scope */ public class Environment { - private final Environment parent; - - private final SymbolTable symbolTable; + private final Environment parent; - public Environment(Environment parent) { - this.parent = parent; - this.symbolTable = new SymbolTable(); - } + private final SymbolTable symbolTable; - /** - * Define symbol with the type - * @param symbol symbol to define - * @param type type - */ - public void define(Symbol symbol, Type type) { - symbolTable.store(symbol, type); - } + public Environment(Environment parent) { + this.parent = parent; + this.symbolTable = new SymbolTable(); + } - /** - * Resolve symbol in the environment - * @param symbol symbol to look up - * @return type if exist - */ - public Optional resolve(Symbol symbol) { - Optional type = Optional.empty(); - for (Environment cur = this; cur != null; cur = cur.parent) { - type = cur.symbolTable.lookup(symbol); - if (type.isPresent()) { - break; - } - } - return type; - } + /** + * Define symbol with the type + * + * @param symbol symbol to define + * @param type type + */ + public void define(Symbol symbol, Type type) { + symbolTable.store(symbol, type); + } - /** - * Resolve symbol definitions by a prefix. - * @param prefix a prefix of symbol - * @return all symbols with types that starts with the prefix - */ - public Map resolveByPrefix(Symbol prefix) { - Map typeByName = new HashMap<>(); - for (Environment cur = this; cur != null; cur = cur.parent) { - typeByName.putAll(cur.symbolTable.lookupByPrefix(prefix)); - } - return typeByName; + /** + * Resolve symbol in the environment + * + * @param symbol symbol to look up + * @return type if exist + */ + public Optional resolve(Symbol symbol) { + Optional type = Optional.empty(); + for (Environment cur = this; cur != null; cur = cur.parent) { + type = cur.symbolTable.lookup(symbol); + if (type.isPresent()) { + break; + } } + return type; + } - /** - * Resolve all symbols in the namespace. - * @param namespace a namespace - * @return all symbols in the namespace - */ - public Map resolveAll(Namespace namespace) { - Map result = new HashMap<>(); - for (Environment cur = this; cur != null; cur = cur.parent) { - // putIfAbsent ensures inner most definition will be used (shadow outers) - cur.symbolTable.lookupAll(namespace).forEach(result::putIfAbsent); - } - return result; + /** + * Resolve symbol definitions by a prefix. 
+ * + * @param prefix a prefix of symbol + * @return all symbols with types that starts with the prefix + */ + public Map resolveByPrefix(Symbol prefix) { + Map typeByName = new HashMap<>(); + for (Environment cur = this; cur != null; cur = cur.parent) { + typeByName.putAll(cur.symbolTable.lookupByPrefix(prefix)); } + return typeByName; + } - /** Current environment is root and no any symbol defined */ - public boolean isEmpty(Namespace namespace) { - for (Environment cur = this; cur != null; cur = cur.parent) { - if (!cur.symbolTable.isEmpty(namespace)) { - return false; - } - } - return true; + /** + * Resolve all symbols in the namespace. + * + * @param namespace a namespace + * @return all symbols in the namespace + */ + public Map resolveAll(Namespace namespace) { + Map result = new HashMap<>(); + for (Environment cur = this; cur != null; cur = cur.parent) { + // putIfAbsent ensures inner most definition will be used (shadow outers) + cur.symbolTable.lookupAll(namespace).forEach(result::putIfAbsent); } + return result; + } - public Environment getParent() { - return parent; + /** Current environment is root and no any symbol defined */ + public boolean isEmpty(Namespace namespace) { + for (Environment cur = this; cur != null; cur = cur.parent) { + if (!cur.symbolTable.isEmpty(namespace)) { + return false; + } } + return true; + } + public Environment getParent() { + return parent; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java index b591de5783..c500809a70 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java @@ -3,27 +3,22 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; -/** - * Namespace of symbol to avoid naming conflict - */ +/** Namespace of symbol to avoid naming conflict */ public enum Namespace { + FIELD_NAME("Field"), + FUNCTION_NAME("Function"), + OPERATOR_NAME("Operator"); - FIELD_NAME("Field"), - FUNCTION_NAME("Function"), - OPERATOR_NAME("Operator"); - - private final String name; - - Namespace(String name) { - this.name = name; - } + private final String name; - @Override - public String toString() { - return name; - } + Namespace(String name) { + this.name = name; + } + @Override + public String toString() { + return name; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java index 75bc306cd9..02decab1ae 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.operator; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.TYPE_ERROR; @@ -13,35 +12,32 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Join operator - */ +/** Join operator */ public enum JoinOperator implements Type { - JOIN; - - @Override - public String getName() { - return name(); - } - - @Override - public Type construct(List others) { - 
Optional isAnyNonIndexType = others.stream(). - filter(type -> !(type instanceof OpenSearchIndex)). - findAny(); - if (isAnyNonIndexType.isPresent()) { - return TYPE_ERROR; - } - return others.get(0); - } - - @Override - public String usage() { - return "Please join index with other index or its nested field."; - } - - @Override - public String toString() { - return "Operator [" + getName() + "]"; + JOIN; + + @Override + public String getName() { + return name(); + } + + @Override + public Type construct(List others) { + Optional isAnyNonIndexType = + others.stream().filter(type -> !(type instanceof OpenSearchIndex)).findAny(); + if (isAnyNonIndexType.isPresent()) { + return TYPE_ERROR; } + return others.get(0); + } + + @Override + public String usage() { + return "Please join index with other index or its nested field."; + } + + @Override + public String toString() { + return "Operator [" + getName() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java index 7efdb55426..ad718a8256 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.special; import java.util.Arrays; @@ -12,79 +11,84 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Generic type for more precise type expression - */ +/** Generic type for more precise type expression */ public class Generic implements Type { - /** Generic type placeholder namespace */ - private enum Name { T } - - /** Construct function to find generic type in argument list with same name */ - public static final Function T = types -> findSameGenericType(Name.T, types); - - /** Generic type name */ - private final Name name; - - /** Actual type binding to current generic type */ - private final Type binding; - - public Generic(Name name, Type type) { - this.name = name; - this.binding = type; - } - - public static Type T(Type type) { - return new Generic(Name.T, type); - } - - /** - * Return a function for replacing generic type in argument list with binding type. - * Ex. 
after T instance found in argument list [T(NUMBER), STRING], create function to return actualTypes[0] - * - * @param func function for finding generic type in argument list (namely, function T above) - * @param actualArgTypes actual argument types - */ - public static Function specialize(Function func, - Type[] actualArgTypes) { - if (func != T) { - return func; - } - - Type genericType = func.apply(actualArgTypes); - int genericTypeIndex = Arrays.asList(actualArgTypes).indexOf(genericType); - return actualTypes -> actualTypes[genericTypeIndex]; + /** Generic type placeholder namespace */ + private enum Name { + T + } + + /** Construct function to find generic type in argument list with same name */ + public static final Function T = types -> findSameGenericType(Name.T, types); + + /** Generic type name */ + private final Name name; + + /** Actual type binding to current generic type */ + private final Type binding; + + public Generic(Name name, Type type) { + this.name = name; + this.binding = type; + } + + public static Type T(Type type) { + return new Generic(Name.T, type); + } + + /** + * Return a function for replacing generic type in argument list with binding type. Ex. after T + * instance found in argument list [T(NUMBER), STRING], create function to return actualTypes[0] + * + * @param func function for finding generic type in argument list (namely, function T above) + * @param actualArgTypes actual argument types + */ + public static Function specialize( + Function func, Type[] actualArgTypes) { + if (func != T) { + return func; } - /** Find placeholder in argument list, ex. in [T(NUMBER), STRING] -> T, return instance at first T */ - private static Type findSameGenericType(Name name, Type[] types) { - return Arrays.stream(types). - filter(type -> type instanceof Generic). - filter(type -> ((Generic) type).name == name). - findFirst(). - orElseThrow(() -> new IllegalStateException(StringUtils.format( - "Type definition is wrong. Could not unbind generic type [%s] in type list %s.", - name, types)) - ); - } - - @Override - public String getName() { - return this.name.name(); - } - - @Override - public boolean isCompatible(Type other) { - return binding.isCompatible(other); - } - - @Override - public Type construct(List others) { - return binding.construct(others); - } - - @Override - public String usage() { - return binding.usage() + " " + name; - } + Type genericType = func.apply(actualArgTypes); + int genericTypeIndex = Arrays.asList(actualArgTypes).indexOf(genericType); + return actualTypes -> actualTypes[genericTypeIndex]; + } + + /** + * Find placeholder in argument list, ex. in [T(NUMBER), STRING] -> T, return instance at first T + */ + private static Type findSameGenericType(Name name, Type[] types) { + return Arrays.stream(types) + .filter(type -> type instanceof Generic) + .filter(type -> ((Generic) type).name == name) + .findFirst() + .orElseThrow( + () -> + new IllegalStateException( + StringUtils.format( + "Type definition is wrong. 
Could not unbind generic type [%s] in type list" + + " %s.", + name, types))); + } + + @Override + public String getName() { + return this.name.name(); + } + + @Override + public boolean isCompatible(Type other) { + return binding.isCompatible(other); + } + + @Override + public Type construct(List others) { + return binding.construct(others); + } + + @Override + public String usage() { + return binding.usage() + " " + name; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java index 511f932a0f..bd78c1b03f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java @@ -3,78 +3,74 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import java.util.List; -/** - * Generic parse tree visitor without dependency on concrete parse tree class. - */ +/** Generic parse tree visitor without dependency on concrete parse tree class. */ public interface GenericSqlParseTreeVisitor { - default void visitRoot() {} - - default void visitQuery() {} + default void visitRoot() {} - default void endVisitQuery() {} + default void visitQuery() {} - default T visitSelect(List items) { - return defaultValue(); - } + default void endVisitQuery() {} - default T visitSelectAllColumn() { - return defaultValue(); - } + default T visitSelect(List items) { + return defaultValue(); + } - default void visitAs(String alias, T type) {} + default T visitSelectAllColumn() { + return defaultValue(); + } - default T visitIndexName(String indexName) { - return defaultValue(); - } + default void visitAs(String alias, T type) {} - default T visitFieldName(String fieldName) { - return defaultValue(); - } + default T visitIndexName(String indexName) { + return defaultValue(); + } - default T visitFunctionName(String funcName) { - return defaultValue(); - } + default T visitFieldName(String fieldName) { + return defaultValue(); + } - default T visitOperator(String opName) { - return defaultValue(); - } + default T visitFunctionName(String funcName) { + return defaultValue(); + } - default T visitString(String text) { - return defaultValue(); - } + default T visitOperator(String opName) { + return defaultValue(); + } - default T visitInteger(String text) { - return defaultValue(); - } + default T visitString(String text) { + return defaultValue(); + } - default T visitFloat(String text) { - return defaultValue(); - } + default T visitInteger(String text) { + return defaultValue(); + } - default T visitBoolean(String text) { - return defaultValue(); - } + default T visitFloat(String text) { + return defaultValue(); + } - default T visitDate(String text) { - return defaultValue(); - } + default T visitBoolean(String text) { + return defaultValue(); + } - default T visitNull() { - return defaultValue(); - } + default T visitDate(String text) { + return defaultValue(); + } - default T visitConvertedType(String text) { - return defaultValue(); - } + default T visitNull() { + return defaultValue(); + } - default T defaultValue() { - return null; - } + default T visitConvertedType(String text) { + return defaultValue(); + } + default T defaultValue() { + return null; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java index 1b6be05f20..09471fa2d7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.SQLExpr; @@ -13,143 +12,136 @@ import org.opensearch.sql.legacy.parser.NestedType; /** - * - * * @author ansj */ public class Field implements Cloneable { - /** - * Constant for '*' field in SELECT - */ - public static final Field STAR = new Field("*", ""); - - protected String name; - protected SQLAggregateOption option; - private String alias; - private NestedType nested; - private ChildrenType children; - private SQLExpr expression; - - public Field(String name, String alias) { - this.name = name; - this.alias = alias; - this.nested = null; - this.children = null; - this.option = null; - } - - public Field(String name, String alias, NestedType nested, ChildrenType children) { - this.name = name; - this.alias = alias; - this.nested = nested; - this.children = children; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getAlias() { - return alias; - } - - public void setAlias(String alias) { - this.alias = alias; - } - - public boolean isNested() { - return this.nested != null; - } - - public boolean isReverseNested() { - return this.nested != null && this.nested.isReverse(); - } - - public void setNested(NestedType nested) { - this.nested = nested; - } - - public String getNestedPath() { - if (this.nested == null) { - return null; - } - - return this.nested.path; - } - - public boolean isChildren() { - return this.children != null; - } - - public void setChildren(ChildrenType children) { - this.children = children; - } - - public String getChildType() { - if (this.children == null) { - return null; - } - return this.children.childType; - } - - public void setAggregationOption(SQLAggregateOption option) { - this.option = option; - } - - public SQLAggregateOption getOption() { - return option; - } - - @Override - public String toString() { - return this.name; - } + /** Constant for '*' field in SELECT */ + public static final Field STAR = new Field("*", ""); + + protected String name; + protected SQLAggregateOption option; + private String alias; + private NestedType nested; + private ChildrenType children; + private SQLExpr expression; + + public Field(String name, String alias) { + this.name = name; + this.alias = alias; + this.nested = null; + this.children = null; + this.option = null; + } + + public Field(String name, String alias, NestedType nested, ChildrenType children) { + this.name = name; + this.alias = alias; + this.nested = nested; + this.children = children; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public boolean isNested() { + return this.nested != null; + } + + public boolean isReverseNested() { + return this.nested != null && this.nested.isReverse(); + } + + public void setNested(NestedType nested) { + this.nested = nested; + } + + public String getNestedPath() { + if (this.nested == null) { + return null; + } + + return this.nested.path; + } + + public boolean isChildren() { + return this.children != null; + } + + public 
void setChildren(ChildrenType children) { + this.children = children; + } + + public String getChildType() { + if (this.children == null) { + return null; + } + return this.children.childType; + } + + public void setAggregationOption(SQLAggregateOption option) { + this.option = option; + } + + public SQLAggregateOption getOption() { + return option; + } + + @Override + public String toString() { + return this.name; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj.getClass() != this.getClass()) { + return false; + } + Field other = (Field) obj; + boolean namesAreEqual = + (other.getName() == null && this.name == null) || other.getName().equals(this.name); + if (!namesAreEqual) { + return false; + } + return (other.getAlias() == null && this.alias == null) || other.getAlias().equals(this.alias); + } + + @Override + public int hashCode() { // Bug: equals() is present but hashCode was missing + return Objects.hash(name, alias); + } + + @Override + protected Object clone() throws CloneNotSupportedException { + return new Field(new String(this.name), new String(this.alias)); + } + + /** Returns true if the field is script field. */ + public boolean isScriptField() { + return false; + } + + public void setExpression(SQLExpr expression) { + this.expression = expression; + } - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (obj.getClass() != this.getClass()) { - return false; - } - Field other = (Field) obj; - boolean namesAreEqual = (other.getName() == null && this.name == null) - || other.getName().equals(this.name); - if (!namesAreEqual) { - return false; - } - return (other.getAlias() == null && this.alias == null) - || other.getAlias().equals(this.alias); - } - - @Override - public int hashCode() { // Bug: equals() is present but hashCode was missing - return Objects.hash(name, alias); - } - - @Override - protected Object clone() throws CloneNotSupportedException { - return new Field(new String(this.name), new String(this.alias)); - } - - /** - * Returns true if the field is script field. - */ - public boolean isScriptField() { - return false; - } - - public void setExpression(SQLExpr expression) { - this.expression = expression; - } - - public SQLExpr getExpression() { - return expression; - } + public SQLExpr getExpression() { + return expression; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java index 6455df727c..67ac7f0e3c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java @@ -3,55 +3,49 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; - -/** - * Represents the from clause. - * Contains index and type which the - * query refer to. - */ +/** Represents the from clause. Contains index and type which the query refer to. */ public class From { - private String index; - private String alias; - - /** - * Extract index and type from the 'from' string - * - * @param from The part after the FROM keyword. 
- */ - public From(String from) { - index = from; - } - - public From(String from, String alias) { - this(from); - this.alias = alias; - } - - public String getIndex() { - return index; - } - - public void setIndex(String index) { - this.index = index; - } - - public String getAlias() { - return alias; - } - - public void setAlias(String alias) { - this.alias = alias; - } - - @Override - public String toString() { - StringBuilder str = new StringBuilder(index); - if (alias != null) { - str.append(" AS ").append(alias); - } - return str.toString(); + private String index; + private String alias; + + /** + * Extract index and type from the 'from' string + * + * @param from The part after the FROM keyword. + */ + public From(String from) { + index = from; + } + + public From(String from, String alias) { + this(from); + this.alias = alias; + } + + public String getIndex() { + return index; + } + + public void setIndex(String index) { + this.index = index; + } + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + @Override + public String toString() { + StringBuilder str = new StringBuilder(index); + if (alias != null) { + str.append(" AS ").append(alias); } + return str.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java index 30cfba4c7a..7d0765580b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import static java.util.stream.Collectors.joining; @@ -29,100 +28,106 @@ * Domain object for HAVING clause in SQL which covers both the parsing and explain logic. *

- * Responsibilities:
- * 1. Parsing: parse conditions out during initialization
- * 2. Explain: translate conditions to OpenSearch query DSL (Bucket Selector Aggregation)
+ *
+ * <p>Responsibilities:
+ *
+ * <ol>
+ *   <li>1. Parsing: parse conditions out during initialization
+ *   <li>2. Explain: translate conditions to OpenSearch query DSL (Bucket Selector Aggregation)
+ * </ol>
*/ public class Having { - private static final String BUCKET_SELECTOR_NAME = "bucket_filter"; - private static final String PARAMS = "params."; - private static final String AND = " && "; - private static final String OR = " || "; - - /** - * Conditions parsed out of HAVING clause - */ - private final List conditions; - - private HavingParser havingParser; - - public List getHavingFields() { - return havingParser.getHavingFields(); + private static final String BUCKET_SELECTOR_NAME = "bucket_filter"; + private static final String PARAMS = "params."; + private static final String AND = " && "; + private static final String OR = " || "; + + /** Conditions parsed out of HAVING clause */ + private final List conditions; + + private HavingParser havingParser; + + public List getHavingFields() { + return havingParser.getHavingFields(); + } + + /** + * Construct by HAVING expression + * + * @param havingExpr having expression + * @param parser where parser + * @throws SqlParseException exception thrown by where parser + */ + public Having(SQLExpr havingExpr, WhereParser parser) throws SqlParseException { + havingParser = new HavingParser(parser); + conditions = parseHavingExprToConditions(havingExpr, havingParser); + } + + public List getConditions() { + return conditions; + } + + /** + * Construct by GROUP BY expression with null check + * + * @param groupByExpr group by expression + * @param parser where parser + * @throws SqlParseException exception thrown by where parser + */ + public Having(SQLSelectGroupByClause groupByExpr, WhereParser parser) throws SqlParseException { + this(groupByExpr == null ? null : groupByExpr.getHaving(), parser); + } + + /** + * Add Bucket Selector Aggregation under group by aggregation with sibling of aggregation of + * fields in SELECT. OpenSearch makes sure that all sibling runs before bucket selector + * aggregation. 
+ * + * @param groupByAgg aggregation builder for GROUP BY clause + * @param fields fields in SELECT clause + * @throws SqlParseException exception thrown for unknown expression + */ + public void explain(AggregationBuilder groupByAgg, List fields) throws SqlParseException { + if (groupByAgg == null || conditions.isEmpty()) { + return; } - /** - * Construct by HAVING expression - * - * @param havingExpr having expression - * @param parser where parser - * @throws SqlParseException exception thrown by where parser - */ - public Having(SQLExpr havingExpr, WhereParser parser) throws SqlParseException { - havingParser = new HavingParser(parser); - conditions = parseHavingExprToConditions(havingExpr, havingParser); + // parsing the fields from SELECT and HAVING clause + groupByAgg.subAggregation( + bucketSelector( + BUCKET_SELECTOR_NAME, + contextForFieldsInSelect(Iterables.concat(fields, getHavingFields())), + explainConditions())); + } + + private List parseHavingExprToConditions(SQLExpr havingExpr, HavingParser parser) + throws SqlParseException { + if (havingExpr == null) { + return Collections.emptyList(); } - public List getConditions() { - return conditions; + Where where = Where.newInstance(); + parser.parseWhere(havingExpr, where); + return where.getWheres(); + } + + private Map contextForFieldsInSelect(Iterable fields) { + Map context = new HashMap<>(); + for (Field field : fields) { + if (field instanceof MethodField) { + // It's required to add to context even if alias in SELECT is exactly same name as that in + // script + context.put( + field.getAlias(), bucketsPath(field.getAlias(), ((MethodField) field).getParams())); + } } + return context; + } - /** - * Construct by GROUP BY expression with null check - * - * @param groupByExpr group by expression - * @param parser where parser - * @throws SqlParseException exception thrown by where parser - */ - public Having(SQLSelectGroupByClause groupByExpr, WhereParser parser) throws SqlParseException { - this(groupByExpr == null ? null : groupByExpr.getHaving(), parser); - } - - /** - * Add Bucket Selector Aggregation under group by aggregation with sibling of aggregation of fields in SELECT. - * OpenSearch makes sure that all sibling runs before bucket selector aggregation. 
- * - * @param groupByAgg aggregation builder for GROUP BY clause - * @param fields fields in SELECT clause - * @throws SqlParseException exception thrown for unknown expression - */ - public void explain(AggregationBuilder groupByAgg, List fields) throws SqlParseException { - if (groupByAgg == null || conditions.isEmpty()) { - return; - } - - // parsing the fields from SELECT and HAVING clause - groupByAgg.subAggregation(bucketSelector(BUCKET_SELECTOR_NAME, - contextForFieldsInSelect(Iterables.concat(fields, getHavingFields())), - explainConditions())); - } - - private List parseHavingExprToConditions(SQLExpr havingExpr, HavingParser parser) - throws SqlParseException { - if (havingExpr == null) { - return Collections.emptyList(); - } - - Where where = Where.newInstance(); - parser.parseWhere(havingExpr, where); - return where.getWheres(); - } - - private Map contextForFieldsInSelect(Iterable fields) { - Map context = new HashMap<>(); - for (Field field : fields) { - if (field instanceof MethodField) { - // It's required to add to context even if alias in SELECT is exactly same name as that in script - context.put(field.getAlias(), bucketsPath(field.getAlias(), ((MethodField) field).getParams())); - } - } - return context; - } - - private Script explainConditions() throws SqlParseException { - return new Script(doExplain(conditions)); - } + private Script explainConditions() throws SqlParseException { + return new Script(doExplain(conditions)); + } /** + *
      * Explain conditions recursively.
      * Example: HAVING c >= 2 OR NOT (a > 20 AND c <= 10 OR a < 1) OR a < 5
      * Object: Where(?:
@@ -138,7 +143,7 @@ private Script explainConditions() throws SqlParseException {
      * Note: a) Where(connector : condition expression).
      * b) Condition is a subclass of Where.
      * c) connector=? means it doesn't matter for first condition in the list
-     *
+     * 
* @param wheres conditions * @return painless script string * @throws SqlParseException unknown type of expression other than identifier and value @@ -148,75 +153,74 @@ private String doExplain(List wheres) throws SqlParseException { return ""; } - StringBuilder script = new StringBuilder(); - for (Where cond : wheres) { - if (script.length() > 0) { - script.append(cond.getConn() == Where.CONN.AND ? AND : OR); - } - - if (cond instanceof Condition) { - script.append(createScript((Condition) cond)); - } else { - script.append('('). - append(doExplain(cond.getWheres())). - append(')'); - } - } - return script.toString(); + StringBuilder script = new StringBuilder(); + for (Where cond : wheres) { + if (script.length() > 0) { + script.append(cond.getConn() == Where.CONN.AND ? AND : OR); + } + + if (cond instanceof Condition) { + script.append(createScript((Condition) cond)); + } else { + script.append('(').append(doExplain(cond.getWheres())).append(')'); + } } - - private String createScript(Condition cond) throws SqlParseException { - String name = cond.getName(); - Object value = cond.getValue(); - switch (cond.getOPERATOR()) { - case EQ: - case GT: - case LT: - case GTE: - case LTE: - case IS: - case ISN: - return expr(name, cond.getOpertatorSymbol(), value); - case N: - return expr(name, "!=", value); - case BETWEEN: { - Object[] values = (Object[]) value; - return expr(name, ">=", values[0]) + AND + expr(name, "<=", values[1]); - } - case NBETWEEN: { - Object[] values = (Object[]) value; - return expr(name, "<", values[0]) + OR + expr(name, ">", values[1]); - } - case IN: - return Arrays.stream((Object[]) value). - map(val -> expr(name, "==", val)). - collect(joining(OR)); - case NIN: - return Arrays.stream((Object[]) value). - map(val -> expr(name, "!=", val)). - collect(joining(AND)); - default: - throw new SqlParseException("Unsupported operation in HAVING clause: " + cond.getOPERATOR()); + return script.toString(); + } + + private String createScript(Condition cond) throws SqlParseException { + String name = cond.getName(); + Object value = cond.getValue(); + switch (cond.getOPERATOR()) { + case EQ: + case GT: + case LT: + case GTE: + case LTE: + case IS: + case ISN: + return expr(name, cond.getOpertatorSymbol(), value); + case N: + return expr(name, "!=", value); + case BETWEEN: + { + Object[] values = (Object[]) value; + return expr(name, ">=", values[0]) + AND + expr(name, "<=", values[1]); } - } - - private String expr(String name, String operator, Object value) { - return String.join(" ", PARAMS + name, operator, value.toString()); - } - - /** - * Build the buckets_path. - * If the field is nested field, using the bucket path. - * else using the alias. 
- */ - private String bucketsPath(String alias, List kvValueList) { - if (kvValueList.size() == 1) { - KVValue kvValue = kvValueList.get(0); - if (StringUtils.equals(kvValue.key, "nested") - && kvValue.value instanceof NestedType) { - return ((NestedType) kvValue.value).getBucketPath(); - } + case NBETWEEN: + { + Object[] values = (Object[]) value; + return expr(name, "<", values[0]) + OR + expr(name, ">", values[1]); } - return alias; + case IN: + return Arrays.stream((Object[]) value) + .map(val -> expr(name, "==", val)) + .collect(joining(OR)); + case NIN: + return Arrays.stream((Object[]) value) + .map(val -> expr(name, "!=", val)) + .collect(joining(AND)); + default: + throw new SqlParseException( + "Unsupported operation in HAVING clause: " + cond.getOPERATOR()); + } + } + + private String expr(String name, String operator, Object value) { + return String.join(" ", PARAMS + name, operator, value.toString()); + } + + /** + * Build the buckets_path. If the field is nested field, using the bucket path. else using the + * alias. + */ + private String bucketsPath(String alias, List kvValueList) { + if (kvValueList.size() == 1) { + KVValue kvValue = kvValueList.get(0); + if (StringUtils.equals(kvValue.key, "nested") && kvValue.value instanceof NestedType) { + return ((NestedType) kvValue.value).getBucketPath(); + } } + return alias; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java index e97a482b40..2a5be5728c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java @@ -3,89 +3,87 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; -/** - * Class used to differentiate SHOW and DESCRIBE statements - */ +/** Class used to differentiate SHOW and DESCRIBE statements */ public class IndexStatement implements QueryStatement { - private StatementType statementType; - private String query; - private String indexPattern; - private String columnPattern; - - public IndexStatement(StatementType statementType, String query) { - this.statementType = statementType; - this.query = query; - - parseQuery(); - } - - private void parseQuery() { - String[] statement = query.split(" "); - - int tokenLength = statement.length; - try { - for (int i = 1; i < tokenLength; i++) { - switch (statement[i].toUpperCase()) { - case "TABLES": - if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { - if (i + 2 < tokenLength) { - indexPattern = replaceWildcard(statement[i + 2]); - i += 2; - } - } - break; - case "COLUMNS": - if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { - if (i + 2 < tokenLength) { - columnPattern = replaceWildcard(statement[i + 2]); - i += 2; - } - } - break; - } + private StatementType statementType; + private String query; + private String indexPattern; + private String columnPattern; + + public IndexStatement(StatementType statementType, String query) { + this.statementType = statementType; + this.query = query; + + parseQuery(); + } + + private void parseQuery() { + String[] statement = query.split(" "); + + int tokenLength = statement.length; + try { + for (int i = 1; i < tokenLength; i++) { + switch (statement[i].toUpperCase()) { + case "TABLES": + if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { + if (i + 2 < tokenLength) { + indexPattern = replaceWildcard(statement[i + 2]); 
+ i += 2; + } } - - if (indexPattern == null) { - throw new IllegalArgumentException(); + break; + case "COLUMNS": + if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { + if (i + 2 < tokenLength) { + columnPattern = replaceWildcard(statement[i + 2]); + i += 2; + } } - } catch (Exception e) { - throw new IllegalArgumentException("Expected syntax example: " + syntaxString(), e); + break; } - } + } - private String replaceWildcard(String str) { - return str.replace("%", ".*").replace("_", "."); + if (indexPattern == null) { + throw new IllegalArgumentException(); + } + } catch (Exception e) { + throw new IllegalArgumentException("Expected syntax example: " + syntaxString(), e); } + } - private String syntaxString() { - if (statementType.equals(StatementType.SHOW)) { - return "'SHOW TABLES LIKE '"; - } else { - return "'DESCRIBE TABLES LIKE
[COLUMNS LIKE ]'"; - } - } + private String replaceWildcard(String str) { + return str.replace("%", ".*").replace("_", "."); + } - public StatementType getStatementType() { - return statementType; + private String syntaxString() { + if (statementType.equals(StatementType.SHOW)) { + return "'SHOW TABLES LIKE
'"; + } else { + return "'DESCRIBE TABLES LIKE
[COLUMNS LIKE ]'"; } + } - public String getQuery() { - return query; - } + public StatementType getStatementType() { + return statementType; + } - public String getIndexPattern() { - return indexPattern; - } + public String getQuery() { + return query; + } - public String getColumnPattern() { - return columnPattern; - } + public String getIndexPattern() { + return indexPattern; + } - public enum StatementType { - SHOW, DESCRIBE - } + public String getColumnPattern() { + return columnPattern; + } + + public enum StatementType { + SHOW, + DESCRIBE + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java index c77df6e9ad..211b33c68a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java @@ -3,85 +3,78 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; import java.util.List; import org.opensearch.sql.legacy.domain.hints.Hint; -/** - * Created by Eliran on 20/8/2015. - */ +/** Created by Eliran on 20/8/2015. */ public class JoinSelect extends Query { + private TableOnJoinSelect firstTable; + private TableOnJoinSelect secondTable; + private Where connectedWhere; + private List hints; + private List connectedConditions; + private int totalLimit; - private TableOnJoinSelect firstTable; - private TableOnJoinSelect secondTable; - private Where connectedWhere; - private List hints; - private List connectedConditions; - private int totalLimit; - - private final int DEAFULT_NUM_OF_RESULTS = 200; - - private SQLJoinTableSource.JoinType joinType; - - - public JoinSelect() { - firstTable = new TableOnJoinSelect(); - secondTable = new TableOnJoinSelect(); + private final int DEAFULT_NUM_OF_RESULTS = 200; - totalLimit = DEAFULT_NUM_OF_RESULTS; - } + private SQLJoinTableSource.JoinType joinType; + public JoinSelect() { + firstTable = new TableOnJoinSelect(); + secondTable = new TableOnJoinSelect(); - public Where getConnectedWhere() { - return connectedWhere; - } + totalLimit = DEAFULT_NUM_OF_RESULTS; + } - public void setConnectedWhere(Where connectedWhere) { - this.connectedWhere = connectedWhere; - } + public Where getConnectedWhere() { + return connectedWhere; + } - public TableOnJoinSelect getFirstTable() { - return firstTable; - } + public void setConnectedWhere(Where connectedWhere) { + this.connectedWhere = connectedWhere; + } - public TableOnJoinSelect getSecondTable() { - return secondTable; - } + public TableOnJoinSelect getFirstTable() { + return firstTable; + } + public TableOnJoinSelect getSecondTable() { + return secondTable; + } - public SQLJoinTableSource.JoinType getJoinType() { - return joinType; - } + public SQLJoinTableSource.JoinType getJoinType() { + return joinType; + } - public void setJoinType(SQLJoinTableSource.JoinType joinType) { - this.joinType = joinType; - } + public void setJoinType(SQLJoinTableSource.JoinType joinType) { + this.joinType = joinType; + } - public List getHints() { - return hints; - } + public List getHints() { + return hints; + } - public void setHints(List hints) { - this.hints = hints; - } + public void setHints(List hints) { + this.hints = hints; + } - public int getTotalLimit() { - return totalLimit; - } + public int getTotalLimit() { + return totalLimit; + } - public List getConnectedConditions() { - return connectedConditions; - } + public List 
getConnectedConditions() { + return connectedConditions; + } - public void setConnectedConditions(List connectedConditions) { - this.connectedConditions = connectedConditions; - } + public void setConnectedConditions(List connectedConditions) { + this.connectedConditions = connectedConditions; + } - public void setTotalLimit(int totalLimit) { - this.totalLimit = totalLimit; - } + public void setTotalLimit(int totalLimit) { + this.totalLimit = totalLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java index 10e2ad3d12..d864cbac12 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java @@ -3,30 +3,29 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; public class KVValue implements Cloneable { - public String key; - public Object value; + public String key; + public Object value; - public KVValue(Object value) { - this.value = value; - } + public KVValue(Object value) { + this.value = value; + } - public KVValue(String key, Object value) { - if (key != null) { - this.key = key.replace("'", ""); - } - this.value = value; + public KVValue(String key, Object value) { + if (key != null) { + this.key = key.replace("'", ""); } + this.value = value; + } - @Override - public String toString() { - if (key == null) { - return value.toString(); - } else { - return key + "=" + value; - } + @Override + public String toString() { + if (key == null) { + return value.toString(); + } else { + return key + "=" + value; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java index 4529c4344c..45d6d1053e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; @@ -14,96 +13,94 @@ import org.opensearch.sql.legacy.utils.Util; /** - * - * * @author ansj */ public class MethodField extends Field { - private List params = null; - - public MethodField(String name, List params, SQLAggregateOption option, String alias) { - super(name, alias); - this.params = params; - this.option = option; - if (alias == null || alias.trim().length() == 0) { - Map paramsAsMap = this.getParamsAsMap(); - if (paramsAsMap.containsKey("alias")) { - this.setAlias(paramsAsMap.get("alias").toString()); - } else { - this.setAlias(this.toString()); - } - } + private List params = null; + + public MethodField(String name, List params, SQLAggregateOption option, String alias) { + super(name, alias); + this.params = params; + this.option = option; + if (alias == null || alias.trim().length() == 0) { + Map paramsAsMap = this.getParamsAsMap(); + if (paramsAsMap.containsKey("alias")) { + this.setAlias(paramsAsMap.get("alias").toString()); + } else { + this.setAlias(this.toString()); + } } + } - public List getParams() { - return params; - } + public List getParams() { + return params; + } - public Map getParamsAsMap() { - Map paramsAsMap = new HashMap<>(); - if (this.params == null) { - return paramsAsMap; - } - for (KVValue kvValue : this.params) { - paramsAsMap.put(kvValue.key, kvValue.value); - } - return paramsAsMap; + public Map 
getParamsAsMap() { + Map paramsAsMap = new HashMap<>(); + if (this.params == null) { + return paramsAsMap; } - - @Override - public String toString() { - if (option != null) { - return this.name + "(" + option + " " + Util.joiner(params, ",") + ")"; - } - return this.name + "(" + Util.joiner(params, ",") + ")"; + for (KVValue kvValue : this.params) { + paramsAsMap.put(kvValue.key, kvValue.value); } + return paramsAsMap; + } - @Override - public boolean isNested() { - Map paramsAsMap = this.getParamsAsMap(); - return paramsAsMap.containsKey("nested") || paramsAsMap.containsKey("reverse_nested"); + @Override + public String toString() { + if (option != null) { + return this.name + "(" + option + " " + Util.joiner(params, ",") + ")"; } - - @Override - public boolean isReverseNested() { - return this.getParamsAsMap().containsKey("reverse_nested"); + return this.name + "(" + Util.joiner(params, ",") + ")"; + } + + @Override + public boolean isNested() { + Map paramsAsMap = this.getParamsAsMap(); + return paramsAsMap.containsKey("nested") || paramsAsMap.containsKey("reverse_nested"); + } + + @Override + public boolean isReverseNested() { + return this.getParamsAsMap().containsKey("reverse_nested"); + } + + @Override + public String getNestedPath() { + if (!this.isNested()) { + return null; } - - @Override - public String getNestedPath() { - if (!this.isNested()) { - return null; - } - if (this.isReverseNested()) { - String reverseNestedPath = this.getParamsAsMap().get("reverse_nested").toString(); - return reverseNestedPath.isEmpty() ? null : reverseNestedPath; - } - - // Fix bug: NestedType.toString() isn't implemented which won't return desired nested path - Object nestedField = getParamsAsMap().get("nested"); - if (nestedField instanceof NestedType) { - return ((NestedType) nestedField).path; - } - return nestedField.toString(); + if (this.isReverseNested()) { + String reverseNestedPath = this.getParamsAsMap().get("reverse_nested").toString(); + return reverseNestedPath.isEmpty() ? 
null : reverseNestedPath; } - @Override - public boolean isChildren() { - Map paramsAsMap = this.getParamsAsMap(); - return paramsAsMap.containsKey("children"); + // Fix bug: NestedType.toString() isn't implemented which won't return desired nested path + Object nestedField = getParamsAsMap().get("nested"); + if (nestedField instanceof NestedType) { + return ((NestedType) nestedField).path; } - - @Override - public String getChildType() { - if (!this.isChildren()) { - return null; - } - - return this.getParamsAsMap().get("children").toString(); + return nestedField.toString(); + } + + @Override + public boolean isChildren() { + Map paramsAsMap = this.getParamsAsMap(); + return paramsAsMap.containsKey("children"); + } + + @Override + public String getChildType() { + if (!this.isChildren()) { + return null; } - @Override - public boolean isScriptField() { - return "script".equals(getName()); - } + return this.getParamsAsMap().get("children").toString(); + } + + @Override + public boolean isScriptField() { + return "script".equals(getName()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java index 8a5c174c41..b83c63aae1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.hints; -/** - * Created by Eliran on 5/9/2015. - */ +/** Created by Eliran on 5/9/2015. */ public class Hint { - private HintType type; - private Object[] params; + private HintType type; + private Object[] params; - public Hint(HintType type, Object[] params) { - this.type = type; - this.params = params; - } + public Hint(HintType type, Object[] params) { + this.type = type; + this.params = params; + } - public HintType getType() { - return type; - } + public HintType getType() { + return type; + } - public Object[] getParams() { - return params; - } + public Object[] getParams() { + return params; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java index 18c68d57ab..81b676e3d5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.hints; - import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.fasterxml.jackson.dataformat.yaml.YAMLParser; import java.io.IOException; @@ -18,211 +16,217 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Created by Eliran on 5/9/2015. - */ +/** Created by Eliran on 5/9/2015. */ public class HintFactory { - private static final String PREFIX = "! "; + private static final String PREFIX = "! "; - public static Hint getHintFromString(String hintAsString) throws SqlParseException { - if (hintAsString.startsWith("! USE_NESTED_LOOPS") || hintAsString.startsWith("! USE_NL")) { - return new Hint(HintType.USE_NESTED_LOOPS, null); - } - - if (hintAsString.startsWith("! SHARD_SIZE")) { - String[] numbers = getParamsFromHint(hintAsString, "! SHARD_SIZE"); - //todo: check if numbers etc.. 
- List params = new ArrayList<>(); - for (String number : numbers) { - if (number.equals("null") || number.equals("infinity")) { - params.add(null); - } else { - params.add(Integer.parseInt(number)); - } - } - return new Hint(HintType.SHARD_SIZE, params.toArray()); - } - - if (hintAsString.equals("! HASH_WITH_TERMS_FILTER")) { - return new Hint(HintType.HASH_WITH_TERMS_FILTER, null); - } - if (hintAsString.startsWith("! JOIN_TABLES_LIMIT")) { - String[] numbers = getParamsFromHint(hintAsString, "! JOIN_TABLES_LIMIT"); - //todo: check if numbers etc.. - List params = new ArrayList<>(); - for (String number : numbers) { - if (number.equals("null") || number.equals("infinity")) { - params.add(null); - } else { - params.add(Integer.parseInt(number)); - } - } - - return new Hint(HintType.JOIN_LIMIT, params.toArray()); - } - if (hintAsString.startsWith("! NL_MULTISEARCH_SIZE")) { - String[] number = getParamsFromHint(hintAsString, "! NL_MULTISEARCH_SIZE"); - //todo: check if numbers etc.. - int multiSearchSize = Integer.parseInt(number[0]); - return new Hint(HintType.NL_MULTISEARCH_SIZE, new Object[]{multiSearchSize}); - } - if (hintAsString.startsWith("! USE_SCROLL")) { - String[] scrollParams = getParamsFromHint(hintAsString, "! USE_SCROLL"); - if (scrollParams != null && scrollParams.length == 2) { - String param = scrollParams[0]; - return new Hint(HintType.USE_SCROLL, - new Object[]{ - (param.startsWith("\"") && param.endsWith("\"")) - || (param.startsWith("'") && param.endsWith("'")) - ? param.substring(1, param.length() - 1) : Integer.parseInt(param), - Integer.parseInt(scrollParams[1])}); - } else { - return new Hint(HintType.USE_SCROLL, new Object[]{50, 60000}); - } - } - if (hintAsString.startsWith("! IGNORE_UNAVAILABLE")) { - return new Hint(HintType.IGNORE_UNAVAILABLE, null); - } - if (hintAsString.startsWith("! DOCS_WITH_AGGREGATION")) { - Integer[] params = parseParamsAsInts(hintAsString, "! DOCS_WITH_AGGREGATION"); - return new Hint(HintType.DOCS_WITH_AGGREGATION, params); - } - if (hintAsString.startsWith("! ROUTINGS")) { - String[] routings = getParamsFromHint(hintAsString, "! ROUTINGS"); - return new Hint(HintType.ROUTINGS, routings); - } - if (hintAsString.startsWith("! HIGHLIGHT")) { - String[] heighlights = getParamsFromHint(hintAsString, "! HIGHLIGHT"); - ArrayList hintParams = new ArrayList(); - hintParams.add(heighlights[0]); - if (heighlights.length > 1) { - StringBuilder builder = new StringBuilder(); - for (int i = 1; i < heighlights.length; i++) { - if (i != 1) { - builder.append("\n"); - } - builder.append(heighlights[i]); - } - String heighlightParam = builder.toString(); - YAMLFactory yamlFactory = new YAMLFactory(); - YAMLParser yamlParser = null; - try { - yamlParser = yamlFactory.createParser(heighlightParam.toCharArray()); - YamlXContentParser yamlXContentParser = new YamlXContentParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, yamlParser); - Map map = yamlXContentParser.map(); - hintParams.add(map); - } catch (IOException e) { - throw new SqlParseException("could not parse heighlight hint: " + e.getMessage()); - } - } - return new Hint(HintType.HIGHLIGHT, hintParams.toArray()); - } - if (hintAsString.startsWith("! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS")) { - Integer[] params = parseParamsAsInts(hintAsString, "! 
MINUS_SCROLL_FETCH_AND_RESULT_LIMITS"); - if (params.length > 3) { - throw new SqlParseException("MINUS_FETCH_AND_RESULT_LIMITS should have 3 int params " - + "(maxFromFirst,maxFromSecond,hitsPerScrollShard)"); - } - Integer[] paramsWithDefaults = new Integer[3]; - int defaultMaxFetchFromTable = 100000; - int defaultFetchOnScroll = 1000; - paramsWithDefaults[0] = defaultMaxFetchFromTable; - paramsWithDefaults[1] = defaultMaxFetchFromTable; - paramsWithDefaults[2] = defaultFetchOnScroll; - for (int i = 0; i < params.length; i++) { - paramsWithDefaults[i] = params[i]; - } - - return new Hint(HintType.MINUS_FETCH_AND_RESULT_LIMITS, paramsWithDefaults); - } - if (hintAsString.startsWith("! MINUS_USE_TERMS_OPTIMIZATION")) { - String[] param = getParamsFromHint(hintAsString, "! MINUS_USE_TERMS_OPTIMIZATION"); - boolean shouldLowerStringOnTerms = false; - if (param != null) { - if (param.length != 1) { - throw new SqlParseException( - "MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: false/true "); - } - try { - shouldLowerStringOnTerms = Boolean.parseBoolean(param[0].toLowerCase()); - } catch (Exception e) { - throw new SqlParseException("MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: " - + "false/true , got:" + param[0]); - } - } - return new Hint(HintType.MINUS_USE_TERMS_OPTIMIZATION, new Object[]{shouldLowerStringOnTerms}); - } - if (hintAsString.startsWith("! COLLAPSE")) { - String collapse = getParamFromHint(hintAsString, "! COLLAPSE"); - return new Hint(HintType.COLLAPSE, new String[]{collapse}); - } - if (hintAsString.startsWith("! POST_FILTER")) { - String postFilter = getParamFromHint(hintAsString, "! POST_FILTER"); - return new Hint(HintType.POST_FILTER, new String[]{postFilter}); - } - - Hint queryPlanHint = parseHintForQueryPlanner(hintAsString); - if (queryPlanHint != null) { - return queryPlanHint; - } - - return null; - } - - /** - * Parse hints for hash join in new query planning framework. - * Only check syntax error here and leave semantics interpret work for planner. - */ - private static Hint parseHintForQueryPlanner(String hintStr) { - if (hintStr.contains("(") - && (hintStr.startsWith("! JOIN_ALGORITHM_BLOCK_SIZE") - || hintStr.startsWith("! JOIN_SCROLL_PAGE_SIZE") - || hintStr.startsWith("! JOIN_CIRCUIT_BREAK_LIMIT") - || hintStr.startsWith("! JOIN_BACK_OFF_RETRY_INTERVALS") - || hintStr.startsWith("! JOIN_TIME_OUT") - )) { // Note that Trie tree is needed here if many hint options - - String hintName = hintStr.substring(PREFIX.length(), hintStr.indexOf('(')).trim(); - String hintPrefix = PREFIX + hintName; - HintType hintType = HintType.valueOf(hintName); - Integer[] params = parseParamsAsInts(hintStr, hintPrefix); - - if (params != null && params.length > 0) { - return new Hint(hintType, params); - } - } else if (hintStr.startsWith("! JOIN_ALGORITHM_USE_LEGACY")) { - return new Hint(HintType.JOIN_ALGORITHM_USE_LEGACY, new Object[0]); - } - return null; + public static Hint getHintFromString(String hintAsString) throws SqlParseException { + if (hintAsString.startsWith("! USE_NESTED_LOOPS") || hintAsString.startsWith("! USE_NL")) { + return new Hint(HintType.USE_NESTED_LOOPS, null); } - private static String getParamFromHint(String hint, String prefix) { - if (!hint.contains("(")) { - return null; - } - return hint.replace(prefix, "").replaceAll("\\s*\\(\\s*", "").replaceAll("\\s*\\,\\s*", ",") - .replaceAll("\\s*\\)\\s*", ""); + if (hintAsString.startsWith("! SHARD_SIZE")) { + String[] numbers = getParamsFromHint(hintAsString, "! 
SHARD_SIZE"); + // todo: check if numbers etc.. + List params = new ArrayList<>(); + for (String number : numbers) { + if (number.equals("null") || number.equals("infinity")) { + params.add(null); + } else { + params.add(Integer.parseInt(number)); + } + } + return new Hint(HintType.SHARD_SIZE, params.toArray()); } - private static String[] getParamsFromHint(String hint, String prefix) { - String param = getParamFromHint(hint, prefix); - return param != null ? param.split(",") : null; + if (hintAsString.equals("! HASH_WITH_TERMS_FILTER")) { + return new Hint(HintType.HASH_WITH_TERMS_FILTER, null); + } + if (hintAsString.startsWith("! JOIN_TABLES_LIMIT")) { + String[] numbers = getParamsFromHint(hintAsString, "! JOIN_TABLES_LIMIT"); + // todo: check if numbers etc.. + List params = new ArrayList<>(); + for (String number : numbers) { + if (number.equals("null") || number.equals("infinity")) { + params.add(null); + } else { + params.add(Integer.parseInt(number)); + } + } + + return new Hint(HintType.JOIN_LIMIT, params.toArray()); + } + if (hintAsString.startsWith("! NL_MULTISEARCH_SIZE")) { + String[] number = getParamsFromHint(hintAsString, "! NL_MULTISEARCH_SIZE"); + // todo: check if numbers etc.. + int multiSearchSize = Integer.parseInt(number[0]); + return new Hint(HintType.NL_MULTISEARCH_SIZE, new Object[] {multiSearchSize}); + } + if (hintAsString.startsWith("! USE_SCROLL")) { + String[] scrollParams = getParamsFromHint(hintAsString, "! USE_SCROLL"); + if (scrollParams != null && scrollParams.length == 2) { + String param = scrollParams[0]; + return new Hint( + HintType.USE_SCROLL, + new Object[] { + (param.startsWith("\"") && param.endsWith("\"")) + || (param.startsWith("'") && param.endsWith("'")) + ? param.substring(1, param.length() - 1) + : Integer.parseInt(param), + Integer.parseInt(scrollParams[1]) + }); + } else { + return new Hint(HintType.USE_SCROLL, new Object[] {50, 60000}); + } + } + if (hintAsString.startsWith("! IGNORE_UNAVAILABLE")) { + return new Hint(HintType.IGNORE_UNAVAILABLE, null); + } + if (hintAsString.startsWith("! DOCS_WITH_AGGREGATION")) { + Integer[] params = parseParamsAsInts(hintAsString, "! DOCS_WITH_AGGREGATION"); + return new Hint(HintType.DOCS_WITH_AGGREGATION, params); + } + if (hintAsString.startsWith("! ROUTINGS")) { + String[] routings = getParamsFromHint(hintAsString, "! ROUTINGS"); + return new Hint(HintType.ROUTINGS, routings); + } + if (hintAsString.startsWith("! HIGHLIGHT")) { + String[] highlights = getParamsFromHint(hintAsString, "! HIGHLIGHT"); + ArrayList hintParams = new ArrayList(); + hintParams.add(highlights[0]); + if (highlights.length > 1) { + StringBuilder builder = new StringBuilder(); + for (int i = 1; i < highlights.length; i++) { + if (i != 1) { + builder.append("\n"); + } + builder.append(highlights[i]); + } + String heighlightParam = builder.toString(); + YAMLFactory yamlFactory = new YAMLFactory(); + YAMLParser yamlParser = null; + try { + yamlParser = yamlFactory.createParser(heighlightParam.toCharArray()); + YamlXContentParser yamlXContentParser = + new YamlXContentParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, yamlParser); + Map map = yamlXContentParser.map(); + hintParams.add(map); + } catch (IOException e) { + throw new SqlParseException("could not parse heighlight hint: " + e.getMessage()); + } + } + return new Hint(HintType.HIGHLIGHT, hintParams.toArray()); + } + if (hintAsString.startsWith("! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS")) { + Integer[] params = parseParamsAsInts(hintAsString, "! 
MINUS_SCROLL_FETCH_AND_RESULT_LIMITS"); + if (params.length > 3) { + throw new SqlParseException( + "MINUS_FETCH_AND_RESULT_LIMITS should have 3 int params " + + "(maxFromFirst,maxFromSecond,hitsPerScrollShard)"); + } + Integer[] paramsWithDefaults = new Integer[3]; + int defaultMaxFetchFromTable = 100000; + int defaultFetchOnScroll = 1000; + paramsWithDefaults[0] = defaultMaxFetchFromTable; + paramsWithDefaults[1] = defaultMaxFetchFromTable; + paramsWithDefaults[2] = defaultFetchOnScroll; + for (int i = 0; i < params.length; i++) { + paramsWithDefaults[i] = params[i]; + } + + return new Hint(HintType.MINUS_FETCH_AND_RESULT_LIMITS, paramsWithDefaults); + } + if (hintAsString.startsWith("! MINUS_USE_TERMS_OPTIMIZATION")) { + String[] param = getParamsFromHint(hintAsString, "! MINUS_USE_TERMS_OPTIMIZATION"); + boolean shouldLowerStringOnTerms = false; + if (param != null) { + if (param.length != 1) { + throw new SqlParseException( + "MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: false/true "); + } + try { + shouldLowerStringOnTerms = Boolean.parseBoolean(param[0].toLowerCase()); + } catch (Exception e) { + throw new SqlParseException( + "MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: " + + "false/true , got:" + + param[0]); + } + } + return new Hint( + HintType.MINUS_USE_TERMS_OPTIMIZATION, new Object[] {shouldLowerStringOnTerms}); + } + if (hintAsString.startsWith("! COLLAPSE")) { + String collapse = getParamFromHint(hintAsString, "! COLLAPSE"); + return new Hint(HintType.COLLAPSE, new String[] {collapse}); + } + if (hintAsString.startsWith("! POST_FILTER")) { + String postFilter = getParamFromHint(hintAsString, "! POST_FILTER"); + return new Hint(HintType.POST_FILTER, new String[] {postFilter}); } - private static Integer[] parseParamsAsInts(String hintAsString, String startWith) { - String[] number = getParamsFromHint(hintAsString, startWith); - if (number == null) { - return new Integer[0]; - } - //todo: check if numbers etc.. - Integer[] params = new Integer[number.length]; - for (int i = 0; i < params.length; i++) { - params[i] = Integer.parseInt(number[i]); - } - return params; + Hint queryPlanHint = parseHintForQueryPlanner(hintAsString); + if (queryPlanHint != null) { + return queryPlanHint; } + return null; + } + + /** + * Parse hints for hash join in new query planning framework. Only check syntax error here and + * leave semantics interpret work for planner. + */ + private static Hint parseHintForQueryPlanner(String hintStr) { + if (hintStr.contains("(") + && (hintStr.startsWith("! JOIN_ALGORITHM_BLOCK_SIZE") + || hintStr.startsWith("! JOIN_SCROLL_PAGE_SIZE") + || hintStr.startsWith("! JOIN_CIRCUIT_BREAK_LIMIT") + || hintStr.startsWith("! JOIN_BACK_OFF_RETRY_INTERVALS") + || hintStr.startsWith( + "! JOIN_TIME_OUT"))) { // Note that Trie tree is needed here if many hint options + + String hintName = hintStr.substring(PREFIX.length(), hintStr.indexOf('(')).trim(); + String hintPrefix = PREFIX + hintName; + HintType hintType = HintType.valueOf(hintName); + Integer[] params = parseParamsAsInts(hintStr, hintPrefix); + + if (params != null && params.length > 0) { + return new Hint(hintType, params); + } + } else if (hintStr.startsWith("! 
JOIN_ALGORITHM_USE_LEGACY")) { + return new Hint(HintType.JOIN_ALGORITHM_USE_LEGACY, new Object[0]); + } + return null; + } + private static String getParamFromHint(String hint, String prefix) { + if (!hint.contains("(")) { + return null; + } + return hint.replace(prefix, "") + .replaceAll("\\s*\\(\\s*", "") + .replaceAll("\\s*\\,\\s*", ",") + .replaceAll("\\s*\\)\\s*", ""); + } + + private static String[] getParamsFromHint(String hint, String prefix) { + String param = getParamFromHint(hint, prefix); + return param != null ? param.split(",") : null; + } + + private static Integer[] parseParamsAsInts(String hintAsString, String startWith) { + String[] number = getParamsFromHint(hintAsString, startWith); + if (number == null) { + return new Integer[0]; + } + // todo: check if numbers etc.. + Integer[] params = new Integer[number.length]; + for (int i = 0; i < params.length; i++) { + params[i] = Integer.parseInt(number[i]); + } + return params; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java index 7d3444c36c..0134ef0874 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java @@ -3,31 +3,28 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.hints; -/** - * Created by Eliran on 29/8/2015. - */ +/** Created by Eliran on 29/8/2015. */ public enum HintType { - HASH_WITH_TERMS_FILTER, - JOIN_LIMIT, - USE_NESTED_LOOPS, - NL_MULTISEARCH_SIZE, - USE_SCROLL, - IGNORE_UNAVAILABLE, - DOCS_WITH_AGGREGATION, - ROUTINGS, - SHARD_SIZE, - HIGHLIGHT, - MINUS_FETCH_AND_RESULT_LIMITS, - MINUS_USE_TERMS_OPTIMIZATION, - COLLAPSE, - POST_FILTER, - JOIN_ALGORITHM_BLOCK_SIZE, - JOIN_ALGORITHM_USE_LEGACY, - JOIN_SCROLL_PAGE_SIZE, - JOIN_CIRCUIT_BREAK_LIMIT, - JOIN_BACK_OFF_RETRY_INTERVALS, - JOIN_TIME_OUT + HASH_WITH_TERMS_FILTER, + JOIN_LIMIT, + USE_NESTED_LOOPS, + NL_MULTISEARCH_SIZE, + USE_SCROLL, + IGNORE_UNAVAILABLE, + DOCS_WITH_AGGREGATION, + ROUTINGS, + SHARD_SIZE, + HIGHLIGHT, + MINUS_FETCH_AND_RESULT_LIMITS, + MINUS_USE_TERMS_OPTIMIZATION, + COLLAPSE, + POST_FILTER, + JOIN_ALGORITHM_BLOCK_SIZE, + JOIN_ALGORITHM_USE_LEGACY, + JOIN_SCROLL_PAGE_SIZE, + JOIN_CIRCUIT_BREAK_LIMIT, + JOIN_BACK_OFF_RETRY_INTERVALS, + JOIN_TIME_OUT } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java index 37d9322b46..0a2093dbc6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain; import static org.opensearch.common.settings.Settings.EMPTY; @@ -33,188 +32,181 @@ import org.opensearch.sql.opensearch.setting.OpenSearchSettings; /** - * Local cluster state information which may be stale but help avoid blocking operation in NIO thread. - *
- * 1) Why extending TransportAction doesn't work here?
- * TransportAction enforce implementation to be performed remotely but local cluster state read is expected here.
- *
- * 2) Why injection by AbstractModule doesn't work here?
- * Because this state needs to be used across the plugin, ex. in rewriter, pretty formatter etc.
+ * Local cluster state information which may be stale but help avoid blocking operation in NIO
+ * thread.
+ *
+ *   1. Why extending TransportAction doesn't work here? TransportAction enforce implementation to
+ *      be performed remotely but local cluster state read is expected here.
+ *   2. Why injection by AbstractModule doesn't work here? Because this state needs to be used
+ *      across the plugin, ex. in rewriter, pretty formatter etc.
*/ public class LocalClusterState { - private static final Logger LOG = LogManager.getLogger(); - - private static final Function> ALL_FIELDS = (anyIndex -> (anyField -> true)); + private static final Logger LOG = LogManager.getLogger(); - /** - * Singleton instance - */ - private static LocalClusterState INSTANCE; + private static final Function> ALL_FIELDS = + (anyIndex -> (anyField -> true)); - /** - * Current cluster state on local node - */ - private ClusterService clusterService; + /** Singleton instance */ + private static LocalClusterState INSTANCE; - private OpenSearchSettings pluginSettings; + /** Current cluster state on local node */ + private ClusterService clusterService; - /** - * Index name expression resolver to get concrete index name - */ - private IndexNameExpressionResolver resolver; + private OpenSearchSettings pluginSettings; - /** - * Thread-safe mapping cache to save the computation of sourceAsMap() which is not lightweight as thought - * Array cannot be used as key because hashCode() always return reference address, so either use wrapper or List. - */ - private final Cache, IndexMappings> cache; + /** Index name expression resolver to get concrete index name */ + private IndexNameExpressionResolver resolver; - /** - * Latest setting value for each registered key. Thread-safe is required. - */ - private final Map latestSettings = new ConcurrentHashMap<>(); + /** + * Thread-safe mapping cache to save the computation of sourceAsMap() which is not lightweight as + * thought Array cannot be used as key because hashCode() always return reference address, so + * either use wrapper or List. + */ + private final Cache, IndexMappings> cache; - public static synchronized LocalClusterState state() { - if (INSTANCE == null) { - INSTANCE = new LocalClusterState(); - } - return INSTANCE; - } + /** Latest setting value for each registered key. Thread-safe is required. 
*/ + private final Map latestSettings = new ConcurrentHashMap<>(); - /** - * Give testing code a chance to inject mock object - */ - public static synchronized void state(LocalClusterState instance) { - INSTANCE = instance; + public static synchronized LocalClusterState state() { + if (INSTANCE == null) { + INSTANCE = new LocalClusterState(); } - - public void setClusterService(ClusterService clusterService) { - this.clusterService = clusterService; - - clusterService.addListener(event -> { - if (event.metadataChanged()) { - // State in cluster service is already changed to event.state() before listener fired - if (LOG.isDebugEnabled()) { - LOG.debug("Metadata in cluster state changed: {}", - new IndexMappings(clusterService.state().metadata())); - } - cache.invalidateAll(); + return INSTANCE; + } + + /** Give testing code a chance to inject mock object */ + public static synchronized void state(LocalClusterState instance) { + INSTANCE = instance; + } + + public void setClusterService(ClusterService clusterService) { + this.clusterService = clusterService; + + clusterService.addListener( + event -> { + if (event.metadataChanged()) { + // State in cluster service is already changed to event.state() before listener fired + if (LOG.isDebugEnabled()) { + LOG.debug( + "Metadata in cluster state changed: {}", + new IndexMappings(clusterService.state().metadata())); } + cache.invalidateAll(); + } }); - } - - public void setPluginSettings(OpenSearchSettings settings) { - this.pluginSettings = settings; - for (Setting setting: settings.getSettings()) { - clusterService.getClusterSettings().addSettingsUpdateConsumer( - setting, - newVal -> { - if (LOG.isDebugEnabled()) { - LOG.debug("The value of setting [{}] changed to [{}]", setting.getKey(), newVal); - } - latestSettings.put(setting.getKey(), newVal); + } + + public void setPluginSettings(OpenSearchSettings settings) { + this.pluginSettings = settings; + for (Setting setting : settings.getSettings()) { + clusterService + .getClusterSettings() + .addSettingsUpdateConsumer( + setting, + newVal -> { + if (LOG.isDebugEnabled()) { + LOG.debug("The value of setting [{}] changed to [{}]", setting.getKey(), newVal); } - ); - } - + latestSettings.put(setting.getKey(), newVal); + }); } - - public void setResolver(IndexNameExpressionResolver resolver) { - this.resolver = resolver; + } + + public void setResolver(IndexNameExpressionResolver resolver) { + this.resolver = resolver; + } + + private LocalClusterState() { + cache = CacheBuilder.newBuilder().maximumSize(100).build(); + } + + /** + * Get plugin setting value by key. Return default value if not configured explicitly. + * + * @param key setting key registered during plugin bootstrap. + * @return setting value or default. + */ + @SuppressWarnings("unchecked") + public T getSettingValue(Settings.Key key) { + Objects.requireNonNull(pluginSettings, "SQL plugin setting is null"); + return (T) latestSettings.getOrDefault(key.getKeyValue(), pluginSettings.getSettingValue(key)); + } + + /** Get field mappings by index expressions. All types and fields are included in response. */ + public IndexMappings getFieldMappings(String[] indices) { + return getFieldMappings(indices, ALL_FIELDS); + } + + /** + * Get field mappings by index expressions, type and field filter. Because + * IndexMetaData/MappingMetaData is hard to convert to FieldMappingMetaData, custom mapping domain + * objects are being used here. In future, it should be moved to domain model layer for all + * OpenSearch specific knowledge. + * + *
Note that cluster state may be change inside OpenSearch so it's possible to read different + * state in 2 accesses to ClusterService.state() here. + * + * @param indices index name expression + * @param fieldFilter field filter predicate + * @return index mapping(s) + */ + private IndexMappings getFieldMappings( + String[] indices, Function> fieldFilter) { + Objects.requireNonNull(clusterService, "Cluster service is null"); + Objects.requireNonNull(resolver, "Index name expression resolver is null"); + + try { + ClusterState state = clusterService.state(); + String[] concreteIndices = resolveIndexExpression(state, indices); + + IndexMappings mappings; + if (fieldFilter == ALL_FIELDS) { + mappings = findMappingsInCache(state, concreteIndices); + } else { + mappings = findMappings(state, concreteIndices, fieldFilter); + } + + LOG.debug("Found mappings: {}", mappings); + return mappings; + } catch (IndexNotFoundException e) { + throw e; + } catch (Exception e) { + throw new IllegalStateException( + "Failed to read mapping in cluster state for indices=" + Arrays.toString(indices), e); } + } - private LocalClusterState() { - cache = CacheBuilder.newBuilder().maximumSize(100).build(); - } - - /** - * Get plugin setting value by key. Return default value if not configured explicitly. - * @param key setting key registered during plugin bootstrap. - * @return setting value or default. - */ - @SuppressWarnings("unchecked") - public T getSettingValue(Settings.Key key) { - Objects.requireNonNull(pluginSettings, "SQL plugin setting is null"); - return (T) latestSettings.getOrDefault(key.getKeyValue(), - pluginSettings.getSettingValue(key)); - } + private String[] resolveIndexExpression(ClusterState state, String[] indices) { + String[] concreteIndices = + resolver.concreteIndexNames(state, IndicesOptions.strictExpandOpen(), true, indices); - /** - * Get field mappings by index expressions. All types and fields are included in response. - */ - public IndexMappings getFieldMappings(String[] indices) { - return getFieldMappings(indices, ALL_FIELDS); + if (LOG.isDebugEnabled()) { + LOG.debug( + "Resolved index expression {} to concrete index names {}", + Arrays.toString(indices), + Arrays.toString(concreteIndices)); } - - /** - * Get field mappings by index expressions, type and field filter. Because IndexMetaData/MappingMetaData - * is hard to convert to FieldMappingMetaData, custom mapping domain objects are being used here. In future, - * it should be moved to domain model layer for all OpenSearch specific knowledge. - *
- * Note that cluster state may be change inside OpenSearch so it's possible to read different state in 2 accesses - * to ClusterService.state() here. - * - * @param indices index name expression - * @param fieldFilter field filter predicate - * @return index mapping(s) - */ - private IndexMappings getFieldMappings(String[] indices, Function> fieldFilter) { - Objects.requireNonNull(clusterService, "Cluster service is null"); - Objects.requireNonNull(resolver, "Index name expression resolver is null"); - - try { - ClusterState state = clusterService.state(); - String[] concreteIndices = resolveIndexExpression(state, indices); - - IndexMappings mappings; - if (fieldFilter == ALL_FIELDS) { - mappings = findMappingsInCache(state, concreteIndices); - } else { - mappings = findMappings(state, concreteIndices, fieldFilter); - } - - LOG.debug("Found mappings: {}", mappings); - return mappings; - } catch (IndexNotFoundException e) { - throw e; - } catch (Exception e) { - throw new IllegalStateException( - "Failed to read mapping in cluster state for indices=" - + Arrays.toString(indices) , e); - } - } - - private String[] resolveIndexExpression(ClusterState state, String[] indices) { - String[] concreteIndices = resolver.concreteIndexNames(state, IndicesOptions.strictExpandOpen(), true, indices); - - if (LOG.isDebugEnabled()) { - LOG.debug("Resolved index expression {} to concrete index names {}", - Arrays.toString(indices), Arrays.toString(concreteIndices)); - } - return concreteIndices; - } - - private IndexMappings findMappings(ClusterState state, String[] indices, - Function> fieldFilter) throws IOException { - LOG.debug("Cache didn't help. Load and parse mapping in cluster state"); - return new IndexMappings( - state.metadata().findMappings(indices, fieldFilter) - ); - } - - private IndexMappings findMappingsInCache(ClusterState state, String[] indices) - throws ExecutionException { - LOG.debug("Looking for mapping in cache: {}", cache.asMap()); - return cache.get(sortToList(indices), - () -> findMappings(state, indices, ALL_FIELDS) - ); - } - - private List sortToList(T[] array) { - // Mostly array has single element - Arrays.sort(array); - return Arrays.asList(array); - } - + return concreteIndices; + } + + private IndexMappings findMappings( + ClusterState state, String[] indices, Function> fieldFilter) + throws IOException { + LOG.debug("Cache didn't help. 
Load and parse mapping in cluster state"); + return new IndexMappings(state.metadata().findMappings(indices, fieldFilter)); + } + + private IndexMappings findMappingsInCache(ClusterState state, String[] indices) + throws ExecutionException { + LOG.debug("Looking for mapping in cache: {}", cache.asMap()); + return cache.get(sortToList(indices), () -> findMappings(state, indices, ALL_FIELDS)); + } + + private List sortToList(T[] array) { + // Mostly array has single element + Arrays.sort(array); + return Arrays.asList(array); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java index bc6c26a6d6..89f8f9ac89 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static java.util.Collections.emptyMap; @@ -16,121 +15,119 @@ /** * Field mapping that parses native OpenSearch mapping. - *
- * NOTE that approaches in this class are NOT reliable because of the OpenSearch mapping query API used.
- * We should deprecate this in future and parse field mapping in more solid way.
+ *
+ *
NOTE that approaches in this class are NOT reliable because of the OpenSearch mapping query + * API used. We should deprecate this in future and parse field mapping in more solid way. */ public class FieldMapping { - /** - * Name of the Field to be parsed - */ - private final String fieldName; - - /** - * Native mapping information returned from OpenSearch - */ - private final Map typeMappings; - - /** - * Maps a field name to Field object that specified in query explicitly - */ - private final Map specifiedFieldsByName; - - public FieldMapping(String fieldName) { - this(fieldName, emptyMap(), emptyMap()); - } - - public FieldMapping(String fieldName, - Map typeMappings, - Map specifiedFieldByNames) { - - this.fieldName = fieldName; - this.typeMappings = typeMappings; - this.specifiedFieldsByName = specifiedFieldByNames; - } - - /** - * Is field specified explicitly in query - * - * @return true if specified - */ - public boolean isSpecified() { - return specifiedFieldsByName.containsKey(fieldName); - } - - /** - * Verify if property field matches wildcard pattern specified in query - * - * @return true if matched - */ - public boolean isWildcardSpecified() { - return specifiedFieldsByName.containsKey(path() + ".*"); - } - - /** - * Is field a property field, which means either object field or nested field. - * - * @return true for property field - */ - public boolean isPropertyField() { - int numOfDots = StringUtils.countMatches(fieldName, '.'); - return numOfDots > 1 || (numOfDots == 1 && !isMultiField()); - } - - /** - * Is field a/in multi-field, for example, field "a.keyword" in field "a" - * - * @return true for multi field - */ - public boolean isMultiField() { - return fieldName.endsWith(".keyword"); + /** Name of the Field to be parsed */ + private final String fieldName; + + /** Native mapping information returned from OpenSearch */ + private final Map typeMappings; + + /** Maps a field name to Field object that specified in query explicitly */ + private final Map specifiedFieldsByName; + + public FieldMapping(String fieldName) { + this(fieldName, emptyMap(), emptyMap()); + } + + public FieldMapping( + String fieldName, + Map typeMappings, + Map specifiedFieldByNames) { + + this.fieldName = fieldName; + this.typeMappings = typeMappings; + this.specifiedFieldsByName = specifiedFieldByNames; + } + + /** + * Is field specified explicitly in query + * + * @return true if specified + */ + public boolean isSpecified() { + return specifiedFieldsByName.containsKey(fieldName); + } + + /** + * Verify if property field matches wildcard pattern specified in query + * + * @return true if matched + */ + public boolean isWildcardSpecified() { + return specifiedFieldsByName.containsKey(path() + ".*"); + } + + /** + * Is field a property field, which means either object field or nested field. + * + * @return true for property field + */ + public boolean isPropertyField() { + int numOfDots = StringUtils.countMatches(fieldName, '.'); + return numOfDots > 1 || (numOfDots == 1 && !isMultiField()); + } + + /** + * Is field a/in multi-field, for example, field "a.keyword" in field "a" + * + * @return true for multi field + */ + public boolean isMultiField() { + return fieldName.endsWith(".keyword"); + } + + /** + * Is field meta field, such as _id, _index, _source etc. 
+ * + * @return true for meta field + */ + public boolean isMetaField() { + return fieldName.startsWith("_"); + } + + /** + * Path of property field, for example "employee" in "employee.manager" + * + * @return path of property field + */ + public String path() { + int lastDot = fieldName.lastIndexOf("."); + if (lastDot == -1) { + throw new IllegalStateException( + "path() is being invoked on the wrong field [" + fieldName + "]"); } - - /** - * Is field meta field, such as _id, _index, _source etc. - * - * @return true for meta field - */ - public boolean isMetaField() { - return fieldName.startsWith("_"); - } - - /** - * Path of property field, for example "employee" in "employee.manager" - * - * @return path of property field - */ - public String path() { - int lastDot = fieldName.lastIndexOf("."); - if (lastDot == -1) { - throw new IllegalStateException("path() is being invoked on the wrong field [" + fieldName + "]"); - } - return fieldName.substring(0, lastDot); - } - - /** - * Find field type in OpenSearch Get Field Mapping API response. Note that Get Field Mapping API does NOT return - * the type for object or nested field. In this case, object type is used as default under the assumption - * that the field queried here must exist (which is true if semantic analyzer is enabled). - * - * @return field type if found in mapping, otherwise "object" type returned - */ - @SuppressWarnings("unchecked") - public String type() { - FieldMappingMetadata metaData = typeMappings.get(fieldName); - if (metaData == null) { - return DescribeResultSet.DEFAULT_OBJECT_DATATYPE; - } - - Map source = metaData.sourceAsMap(); - String[] fieldPath = fieldName.split("\\."); - - // For object/nested field, fieldName is full path though only innermost field name present in mapping - // For example, fieldName='employee.location.city', metaData='{"city":{"type":"text"}}' - String innermostFieldName = (fieldPath.length == 1) ? fieldName : fieldPath[fieldPath.length - 1]; - Map fieldMapping = (Map) source.get(innermostFieldName); - return (String) fieldMapping.get("type"); + return fieldName.substring(0, lastDot); + } + + /** + * Find field type in OpenSearch Get Field Mapping API response. Note that Get Field Mapping API + * does NOT return the type for object or nested field. In this case, object type is used as + * default under the assumption that the field queried here must exist (which is true if semantic + * analyzer is enabled). + * + * @return field type if found in mapping, otherwise "object" type returned + */ + @SuppressWarnings("unchecked") + public String type() { + FieldMappingMetadata metaData = typeMappings.get(fieldName); + if (metaData == null) { + return DescribeResultSet.DEFAULT_OBJECT_DATATYPE; } + Map source = metaData.sourceAsMap(); + String[] fieldPath = fieldName.split("\\."); + + // For object/nested field, fieldName is full path though only innermost field name present in + // mapping + // For example, fieldName='employee.location.city', metaData='{"city":{"type":"text"}}' + String innermostFieldName = + (fieldPath.length == 1) ? 
fieldName : fieldPath[fieldPath.length - 1]; + Map fieldMapping = (Map) source.get(innermostFieldName); + return (String) fieldMapping.get("type"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java index 6f73da62e4..05b3f2854e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import java.util.HashMap; @@ -15,6 +14,7 @@ import org.opensearch.cluster.metadata.MappingMetadata; /** + *
  * Field mappings in a specific type.
  * 
* Sample: @@ -35,113 +35,104 @@ * 'keyword': { * 'type': keyword, * 'ignore_above': 256 - * } - * } - * } - * } - * } + * }}}}} + *
*/ @SuppressWarnings("unchecked") public class FieldMappings implements Mappings> { - private static final String PROPERTIES = "properties"; + private static final String PROPERTIES = "properties"; - /** - * Mapping from field name to its type - */ - private final Map fieldMappings; + /** Mapping from field name to its type */ + private final Map fieldMappings; - public FieldMappings(MappingMetadata mappings) { - fieldMappings = mappings.sourceAsMap(); - } + public FieldMappings(MappingMetadata mappings) { + fieldMappings = mappings.sourceAsMap(); + } - public FieldMappings(Map> mapping) { - Map finalMapping = new HashMap<>(); - finalMapping.put(PROPERTIES, mapping); - fieldMappings = finalMapping; - } + public FieldMappings(Map> mapping) { + Map finalMapping = new HashMap<>(); + finalMapping.put(PROPERTIES, mapping); + fieldMappings = finalMapping; + } - @Override - public boolean has(String path) { - return mapping(path) != null; - } + @Override + public boolean has(String path) { + return mapping(path) != null; + } - /** - * Different from default implementation that search mapping for path is required - */ - @Override - public Map mapping(String path) { - Map mapping = fieldMappings; - for (String name : path.split("\\.")) { - if (mapping == null || !mapping.containsKey(PROPERTIES)) { - return null; - } - - mapping = (Map) - ((Map) mapping.get(PROPERTIES)).get(name); - } - return mapping; - } + /** Different from default implementation that search mapping for path is required */ + @Override + public Map mapping(String path) { + Map mapping = fieldMappings; + for (String name : path.split("\\.")) { + if (mapping == null || !mapping.containsKey(PROPERTIES)) { + return null; + } - @Override - public Map> data() { - // Is this assumption true? Is it possible mapping of field is NOT a Map? - return (Map>) fieldMappings.get(PROPERTIES); + mapping = (Map) ((Map) mapping.get(PROPERTIES)).get(name); } - - public void flat(BiConsumer func) { - flatMappings(data(), Optional.empty(), func); + return mapping; + } + + @Override + public Map> data() { + // Is this assumption true? Is it possible mapping of field is NOT a Map? + return (Map>) fieldMappings.get(PROPERTIES); + } + + public void flat(BiConsumer func) { + flatMappings(data(), Optional.empty(), func); + } + + @SuppressWarnings("unchecked") + private void flatMappings( + Map> mappings, + Optional path, + BiConsumer func) { + mappings.forEach( + (fieldName, mapping) -> { + String fullFieldName = path.map(s -> s + "." + fieldName).orElse(fieldName); + String type = (String) mapping.getOrDefault("type", "object"); + func.accept(fullFieldName, type); + + if (mapping.containsKey("fields")) { + ((Map>) mapping.get("fields")) + .forEach( + (innerFieldName, innerMapping) -> + func.accept( + fullFieldName + "." + innerFieldName, + (String) innerMapping.getOrDefault("type", "object"))); + } + + if (mapping.containsKey("properties")) { + flatMappings( + (Map>) mapping.get("properties"), + Optional.of(fullFieldName), + func); + } + }); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @SuppressWarnings("unchecked") - private void flatMappings(Map> mappings, - Optional path, - BiConsumer func) { - mappings.forEach( - (fieldName, mapping) -> { - String fullFieldName = path.map(s -> s + "." 
+ fieldName).orElse(fieldName); - String type = (String) mapping.getOrDefault("type", "object"); - func.accept(fullFieldName, type); - - if (mapping.containsKey("fields")) { - ((Map>) mapping.get("fields")).forEach( - (innerFieldName, innerMapping) -> - func.accept(fullFieldName + "." + innerFieldName, - (String) innerMapping.getOrDefault("type", "object")) - ); - } - - if (mapping.containsKey("properties")) { - flatMappings( - (Map>) mapping.get("properties"), - Optional.of(fullFieldName), - func - ); - } - } - ); + if (o == null || getClass() != o.getClass()) { + return false; } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - FieldMappings that = (FieldMappings) o; - return Objects.equals(fieldMappings, that.fieldMappings); - } - - @Override - public int hashCode() { - return Objects.hash(fieldMappings); - } - - @Override - public String toString() { - return "FieldMappings" + new JSONObject(fieldMappings).toString(2); - } - + FieldMappings that = (FieldMappings) o; + return Objects.equals(fieldMappings, that.fieldMappings); + } + + @Override + public int hashCode() { + return Objects.hash(fieldMappings); + } + + @Override + public String toString() { + return "FieldMappings" + new JSONObject(fieldMappings).toString(2); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java index 3b89eef02f..22cb99c44e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static java.util.Collections.emptyMap; @@ -14,6 +13,7 @@ import org.opensearch.cluster.metadata.Metadata; /** + *
  * Index mappings in the cluster.
  * 
* Sample: @@ -30,53 +30,53 @@ * 2) FieldMetadata: * ((Map) client.admin().indices().getFieldMappings(request).actionGet().mappings().get("bank") * .get("account").get("balance").sourceAsMap().get("balance")).get("type") + *
*/ public class IndexMappings implements Mappings { - public static final IndexMappings EMPTY = new IndexMappings(); + public static final IndexMappings EMPTY = new IndexMappings(); - /** - * Mapping from Index name to mappings of all fields in it - */ - private final Map indexMappings; + /** Mapping from Index name to mappings of all fields in it */ + private final Map indexMappings; - public IndexMappings() { - this.indexMappings = emptyMap(); - } + public IndexMappings() { + this.indexMappings = emptyMap(); + } - public IndexMappings(Metadata metaData) { - this.indexMappings = buildMappings(metaData.indices(), - indexMetaData -> new FieldMappings(indexMetaData.mapping())); - } + public IndexMappings(Metadata metaData) { + this.indexMappings = + buildMappings( + metaData.indices(), indexMetaData -> new FieldMappings(indexMetaData.mapping())); + } - public IndexMappings(Map mappings) { - this.indexMappings = buildMappings(mappings, FieldMappings::new); - } + public IndexMappings(Map mappings) { + this.indexMappings = buildMappings(mappings, FieldMappings::new); + } - @Override - public Map data() { - return indexMappings; - } + @Override + public Map data() { + return indexMappings; + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - IndexMappings that = (IndexMappings) o; - return Objects.equals(indexMappings, that.indexMappings); + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public int hashCode() { - return Objects.hash(indexMappings); + if (o == null || getClass() != o.getClass()) { + return false; } + IndexMappings that = (IndexMappings) o; + return Objects.equals(indexMappings, that.indexMappings); + } - @Override - public String toString() { - return "IndexMappings{" + indexMappings + '}'; - } + @Override + public int hashCode() { + return Objects.hash(indexMappings); + } + + @Override + public String toString() { + return "IndexMappings{" + indexMappings + '}'; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java index 03bfcaf030..3cf02b55d8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import java.util.Collection; @@ -12,43 +11,43 @@ import java.util.stream.Collectors; /** - * Mappings interface to provide default implementation (minimal set of Map methods) for subclass in hierarchy. + * Mappings interface to provide default implementation (minimal set of Map methods) for subclass in + * hierarchy. 
* * @param Type of nested mapping */ public interface Mappings { - default boolean has(String name) { - return data().containsKey(name); - } + default boolean has(String name) { + return data().containsKey(name); + } - default Collection allNames() { - return data().keySet(); - } + default Collection allNames() { + return data().keySet(); + } - default T mapping(String name) { - return data().get(name); - } + default T mapping(String name) { + return data().get(name); + } - default T firstMapping() { - return allMappings().iterator().next(); - } + default T firstMapping() { + return allMappings().iterator().next(); + } - default Collection allMappings() { - return data().values(); - } + default Collection allMappings() { + return data().values(); + } - default boolean isEmpty() { - return data().isEmpty(); - } + default boolean isEmpty() { + return data().isEmpty(); + } - Map data(); + Map data(); - /** - * Build a map from an existing map by applying provided function to each value. - */ - default Map buildMappings(Map mappings, Function func) { - return mappings.entrySet().stream().collect( + /** Build a map from an existing map by applying provided function to each value. */ + default Map buildMappings(Map mappings, Function func) { + return mappings.entrySet().stream() + .collect( Collectors.toUnmodifiableMap(Map.Entry::getKey, func.compose(Map.Entry::getValue))); - } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java index d56ff231e0..c58bba9e26 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import org.opensearch.sql.legacy.executor.csv.CSVResultRestExecutor; @@ -12,41 +11,37 @@ import org.opensearch.sql.legacy.query.join.OpenSearchJoinQueryAction; import org.opensearch.sql.legacy.query.multi.MultiQueryAction; -/** - * Created by Eliran on 26/12/2015. - */ +/** Created by Eliran on 26/12/2015. */ public class ActionRequestRestExecutorFactory { - /** - * Create executor based on the format and wrap with AsyncRestExecutor - * to async blocking execute() call if necessary. - * - * @param format format of response - * @param queryAction query action - * @return executor - */ - public static RestExecutor createExecutor(Format format, QueryAction queryAction) { - switch (format) { - case CSV: - return new AsyncRestExecutor(new CSVResultRestExecutor()); - case JSON: - return new AsyncRestExecutor( - new ElasticDefaultRestExecutor(queryAction), - action -> isJoin(action) || isUnionMinus(action) - ); - case JDBC: - case RAW: - case TABLE: - default: - return new AsyncRestExecutor(new PrettyFormatRestExecutor(format.getFormatName())); - } + /** + * Create executor based on the format and wrap with AsyncRestExecutor to async blocking execute() + * call if necessary. 
+ * + * @param format format of response + * @param queryAction query action + * @return executor + */ + public static RestExecutor createExecutor(Format format, QueryAction queryAction) { + switch (format) { + case CSV: + return new AsyncRestExecutor(new CSVResultRestExecutor()); + case JSON: + return new AsyncRestExecutor( + new ElasticDefaultRestExecutor(queryAction), + action -> isJoin(action) || isUnionMinus(action)); + case JDBC: + case RAW: + case TABLE: + default: + return new AsyncRestExecutor(new PrettyFormatRestExecutor(format.getFormatName())); } + } - private static boolean isJoin(QueryAction queryAction) { - return queryAction instanceof OpenSearchJoinQueryAction; - } - - private static boolean isUnionMinus(QueryAction queryAction) { - return queryAction instanceof MultiQueryAction; - } + private static boolean isJoin(QueryAction queryAction) { + return queryAction instanceof OpenSearchJoinQueryAction; + } + private static boolean isUnionMinus(QueryAction queryAction) { + return queryAction instanceof MultiQueryAction; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java index 454babd2e9..c47092f10b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import com.google.common.collect.ImmutableMap; @@ -14,25 +13,25 @@ @RequiredArgsConstructor public enum Format { - JDBC("jdbc"), - JSON("json"), - CSV("csv"), - RAW("raw"), - TABLE("table"); + JDBC("jdbc"), + JSON("json"), + CSV("csv"), + RAW("raw"), + TABLE("table"); - @Getter - private final String formatName; + @Getter private final String formatName; - private static final Map ALL_FORMATS; - static { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (Format format : Format.values()) { - builder.put(format.formatName, format); - } - ALL_FORMATS = builder.build(); - } + private static final Map ALL_FORMATS; - public static Optional of(String formatName) { - return Optional.ofNullable(ALL_FORMATS.getOrDefault(formatName, null)); + static { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + for (Format format : Format.values()) { + builder.put(format.formatName, format); } + ALL_FORMATS = builder.build(); + } + + public static Optional of(String formatName) { + return Optional.ofNullable(ALL_FORMATS.getOrDefault(formatName, null)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java index 591319c74c..a88c3e5724 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; @@ -23,80 +22,79 @@ import org.opensearch.rest.action.RestBuilderListener; import org.opensearch.sql.legacy.antlr.semantic.SemanticAnalysisException; -/** - * Created by Eliran on 6/10/2015. - */ +/** Created by Eliran on 6/10/2015. 
*/ public class GetIndexRequestRestListener extends RestBuilderListener { - private GetIndexRequest getIndexRequest; - - public GetIndexRequestRestListener(RestChannel channel, GetIndexRequest getIndexRequest) { - super(channel); - this.getIndexRequest = getIndexRequest; - } - - @Override - public RestResponse buildResponse(GetIndexResponse getIndexResponse, XContentBuilder builder) throws Exception { - GetIndexRequest.Feature[] features = getIndexRequest.features(); - String[] indices = getIndexResponse.indices(); - - builder.startObject(); - for (String index : indices) { - builder.startObject(index); - for (GetIndexRequest.Feature feature : features) { - switch (feature) { - case ALIASES: - writeAliases(getIndexResponse.aliases().get(index), builder, channel.request()); - break; - case MAPPINGS: - writeMappings(getIndexResponse.mappings().get(index), builder, channel.request()); - break; - case SETTINGS: - writeSettings(getIndexResponse.settings().get(index), builder, channel.request()); - break; - default: - throw new SemanticAnalysisException("Unsupported feature: " + feature); - } - } - builder.endObject(); - + private GetIndexRequest getIndexRequest; + + public GetIndexRequestRestListener(RestChannel channel, GetIndexRequest getIndexRequest) { + super(channel); + this.getIndexRequest = getIndexRequest; + } + + @Override + public RestResponse buildResponse(GetIndexResponse getIndexResponse, XContentBuilder builder) + throws Exception { + GetIndexRequest.Feature[] features = getIndexRequest.features(); + String[] indices = getIndexResponse.indices(); + + builder.startObject(); + for (String index : indices) { + builder.startObject(index); + for (GetIndexRequest.Feature feature : features) { + switch (feature) { + case ALIASES: + writeAliases(getIndexResponse.aliases().get(index), builder, channel.request()); + break; + case MAPPINGS: + writeMappings(getIndexResponse.mappings().get(index), builder, channel.request()); + break; + case SETTINGS: + writeSettings(getIndexResponse.settings().get(index), builder, channel.request()); + break; + default: + throw new SemanticAnalysisException("Unsupported feature: " + feature); } - builder.endObject(); - - return new BytesRestResponse(RestStatus.OK, builder); + } + builder.endObject(); } - - private void writeAliases(List aliases, XContentBuilder builder, ToXContent.Params params) - throws IOException { - builder.startObject(Fields.ALIASES); - if (aliases != null) { - for (AliasMetadata alias : aliases) { - AliasMetadata.Builder.toXContent(alias, builder, params); - } - } - builder.endObject(); - } - - private void writeSettings(Settings settings, XContentBuilder builder, ToXContent.Params params) - throws IOException { - builder.startObject(Fields.SETTINGS); - settings.toXContent(builder, params); - builder.endObject(); - } - - private void writeMappings(MappingMetadata mappingMetadata, - XContentBuilder builder, ToXContent.Params params) throws IOException { - if ( mappingMetadata != null) { - builder.field(Fields.MAPPINGS); - builder.map(mappingMetadata.getSourceAsMap()); - } + builder.endObject(); + + return new BytesRestResponse(RestStatus.OK, builder); + } + + private void writeAliases( + List aliases, XContentBuilder builder, ToXContent.Params params) + throws IOException { + builder.startObject(Fields.ALIASES); + if (aliases != null) { + for (AliasMetadata alias : aliases) { + AliasMetadata.Builder.toXContent(alias, builder, params); + } } - - - static class Fields { - static final String ALIASES = "aliases"; - static final String 
MAPPINGS = "mappings"; - static final String SETTINGS = "settings"; - static final String WARMERS = "warmers"; + builder.endObject(); + } + + private void writeSettings(Settings settings, XContentBuilder builder, ToXContent.Params params) + throws IOException { + builder.startObject(Fields.SETTINGS); + settings.toXContent(builder, params); + builder.endObject(); + } + + private void writeMappings( + MappingMetadata mappingMetadata, XContentBuilder builder, ToXContent.Params params) + throws IOException { + if (mappingMetadata != null) { + builder.field(Fields.MAPPINGS); + builder.map(mappingMetadata.getSourceAsMap()); } + } + + static class Fields { + static final String ALIASES = "aliases"; + static final String MAPPINGS = "mappings"; + static final String SETTINGS = "settings"; + static final String WARMERS = "warmers"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java index 5297fa38ff..aa0d02bed8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import org.json.JSONObject; @@ -11,59 +10,59 @@ public class ErrorMessage { - protected E exception; + protected E exception; - private int status; - private String type; - private String reason; - private String details; + private int status; + private String type; + private String reason; + private String details; - public ErrorMessage(E exception, int status) { - this.exception = exception; - this.status = status; + public ErrorMessage(E exception, int status) { + this.exception = exception; + this.status = status; - this.type = fetchType(); - this.reason = fetchReason(); - this.details = fetchDetails(); - } + this.type = fetchType(); + this.reason = fetchReason(); + this.details = fetchDetails(); + } - private String fetchType() { - return exception.getClass().getSimpleName(); - } + private String fetchType() { + return exception.getClass().getSimpleName(); + } - protected String fetchReason() { - return status == RestStatus.BAD_REQUEST.getStatus() - ? "Invalid SQL query" - : "There was internal problem at backend"; - } + protected String fetchReason() { + return status == RestStatus.BAD_REQUEST.getStatus() + ? "Invalid SQL query" + : "There was internal problem at backend"; + } - protected String fetchDetails() { - // Some exception prints internal information (full class name) which is security concern - //return exception.toString(); - return emptyStringIfNull(exception.getLocalizedMessage()); - } + protected String fetchDetails() { + // Some exception prints internal information (full class name) which is security concern + // return exception.toString(); + return emptyStringIfNull(exception.getLocalizedMessage()); + } - private String emptyStringIfNull(String str) { - return str != null ? str : ""; - } + private String emptyStringIfNull(String str) { + return str != null ? 
str : ""; + } - @Override - public String toString() { - JSONObject output = new JSONObject(); + @Override + public String toString() { + JSONObject output = new JSONObject(); - output.put("status", status); - output.put("error", getErrorAsJson()); + output.put("status", status); + output.put("error", getErrorAsJson()); - return output.toString(2); - } + return output.toString(2); + } - private JSONObject getErrorAsJson() { - JSONObject errorJson = new JSONObject(); + private JSONObject getErrorAsJson() { + JSONObject errorJson = new JSONObject(); - errorJson.put("type", type); - errorJson.put("reason", reason); - errorJson.put("details", details); + errorJson.put("type", type); + errorJson.put("reason", reason); + errorJson.put("details", details); - return errorJson; - } + return errorJson; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java index 0e96fe9b67..ba28ee8325 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java @@ -3,42 +3,40 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import org.opensearch.OpenSearchException; public class ErrorMessageFactory { - /** - * Create error message based on the exception type - * Exceptions of OpenSearch exception type and exceptions with wrapped OpenSearch exception causes - * should create {@link OpenSearchErrorMessage} - * - * @param e exception to create error message - * @param status exception status code - * @return error message - */ - - public static ErrorMessage createErrorMessage(Exception e, int status) { - if (e instanceof OpenSearchException) { - return new OpenSearchErrorMessage((OpenSearchException) e, - ((OpenSearchException) e).status().getStatus()); - } else if (unwrapCause(e) instanceof OpenSearchException) { - OpenSearchException exception = (OpenSearchException) unwrapCause(e); - return new OpenSearchErrorMessage(exception, exception.status().getStatus()); - } - return new ErrorMessage(e, status); + /** + * Create error message based on the exception type Exceptions of OpenSearch exception type and + * exceptions with wrapped OpenSearch exception causes should create {@link + * OpenSearchErrorMessage} + * + * @param e exception to create error message + * @param status exception status code + * @return error message + */ + public static ErrorMessage createErrorMessage(Exception e, int status) { + if (e instanceof OpenSearchException) { + return new OpenSearchErrorMessage( + (OpenSearchException) e, ((OpenSearchException) e).status().getStatus()); + } else if (unwrapCause(e) instanceof OpenSearchException) { + OpenSearchException exception = (OpenSearchException) unwrapCause(e); + return new OpenSearchErrorMessage(exception, exception.status().getStatus()); } + return new ErrorMessage(e, status); + } - public static Throwable unwrapCause(Throwable t) { - Throwable result = t; - if (result instanceof OpenSearchException) { - return result; - } - if (result.getCause() == null) { - return result; - } - result = unwrapCause(result.getCause()); - return result; + public static Throwable unwrapCause(Throwable t) { + Throwable result = t; + if (result instanceof OpenSearchException) { + return result; + } + if (result.getCause() == null) { + return result; } + result = unwrapCause(result.getCause()); + 
return result; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java index aa6ea05389..908a5fdfed 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import static org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS; @@ -30,136 +29,144 @@ import org.opensearch.sql.legacy.domain.Select; import org.opensearch.sql.legacy.query.join.BackOffRetryStrategy; -/** - * Created by Eliran on 2/9/2016. - */ +/** Created by Eliran on 2/9/2016. */ public class ElasticUtils { - public static SearchResponse scrollOneTimeWithHits(Client client, SearchRequestBuilder requestBuilder, - Select originalSelect, int resultSize) { - SearchRequestBuilder scrollRequest = requestBuilder - .setScroll(new TimeValue(60000)).setSize(resultSize); - boolean ordered = originalSelect.isOrderdSelect(); - if (!ordered) { - scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); - } - SearchResponse responseWithHits = scrollRequest.get(); - //on ordered select - not using SCAN , elastic returns hits on first scroll - //es5.0 elastic always return docs on scan -// if(!ordered) { -// responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) -// .setScroll(new TimeValue(600000)).get(); -// } - return responseWithHits; + public static SearchResponse scrollOneTimeWithHits( + Client client, SearchRequestBuilder requestBuilder, Select originalSelect, int resultSize) { + SearchRequestBuilder scrollRequest = + requestBuilder.setScroll(new TimeValue(60000)).setSize(resultSize); + boolean ordered = originalSelect.isOrderdSelect(); + if (!ordered) { + scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); + } + SearchResponse responseWithHits = scrollRequest.get(); + // on ordered select - not using SCAN , elastic returns hits on first scroll + // es5.0 elastic always return docs on scan + // if(!ordered) { + // responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) + // .setScroll(new TimeValue(600000)).get(); + // } + return responseWithHits; + } + + // use our deserializer instead of results toXcontent because the source field is different from + // sourceAsMap. 
+ public static String hitsAsStringResult(SearchHits results, MetaSearchResult metaResults) + throws IOException { + if (results == null) { + return null; + } + Object[] searchHits; + searchHits = + new Object + [Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L).intValue()]; + int i = 0; + for (SearchHit hit : results) { + HashMap value = new HashMap<>(); + value.put("_id", hit.getId()); + value.put("_score", hit.getScore()); + value.put("_source", hit.getSourceAsMap()); + searchHits[i] = value; + i++; + } + HashMap hits = new HashMap<>(); + hits.put( + "total", + ImmutableMap.of( + "value", Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L), + "relation", + Optional.ofNullable(results.getTotalHits()) + .map(th -> th.relation) + .orElse(Relation.EQUAL_TO))); + hits.put("max_score", results.getMaxScore()); + hits.put("hits", searchHits); + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + builder.field("took", metaResults.getTookImMilli()); + builder.field("timed_out", metaResults.isTimedOut()); + builder.field( + "_shards", + ImmutableMap.of( + "total", + metaResults.getTotalNumOfShards(), + "successful", + metaResults.getSuccessfulShards(), + "failed", + metaResults.getFailedShards())); + builder.field("hits", hits); + builder.endObject(); + return BytesReference.bytes(builder).utf8ToString(); + } + + /** Generate string by serializing SearchHits in place without any new HashMap copy */ + public static XContentBuilder hitsAsStringResultZeroCopy( + List results, MetaSearchResult metaResults, ElasticJoinExecutor executor) + throws IOException { + BytesStreamOutput outputStream = new BytesStreamOutput(); + + XContentBuilder builder = XContentFactory.jsonBuilder(outputStream).prettyPrint(); + builder.startObject(); + builder.field("took", metaResults.getTookImMilli()); + builder.field("timed_out", metaResults.isTimedOut()); + builder.field( + "_shards", + ImmutableMap.of( + "total", metaResults.getTotalNumOfShards(), + "successful", metaResults.getSuccessfulShards(), + "failed", metaResults.getFailedShards())); + toXContent(builder, EMPTY_PARAMS, results, executor); + builder.endObject(); + + if (!BackOffRetryStrategy.isHealthy(2 * outputStream.size(), executor)) { + throw new IllegalStateException("Memory could be insufficient when sendResponse()."); } - - //use our deserializer instead of results toXcontent because the source field is different from sourceAsMap. 
- public static String hitsAsStringResult(SearchHits results, MetaSearchResult metaResults) throws IOException { - if (results == null) { - return null; - } - Object[] searchHits; - searchHits = new Object[Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L).intValue()]; - int i = 0; - for (SearchHit hit : results) { - HashMap value = new HashMap<>(); - value.put("_id", hit.getId()); - value.put("_score", hit.getScore()); - value.put("_source", hit.getSourceAsMap()); - searchHits[i] = value; - i++; - } - HashMap hits = new HashMap<>(); - hits.put("total", ImmutableMap.of( - "value", Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L), - "relation", Optional.ofNullable(results.getTotalHits()).map(th -> th.relation).orElse(Relation.EQUAL_TO) - )); - hits.put("max_score", results.getMaxScore()); - hits.put("hits", searchHits); - XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); - builder.startObject(); - builder.field("took", metaResults.getTookImMilli()); - builder.field("timed_out", metaResults.isTimedOut()); - builder.field("_shards", ImmutableMap.of("total", metaResults.getTotalNumOfShards(), - "successful", metaResults.getSuccessfulShards() - , "failed", metaResults.getFailedShards())); - builder.field("hits", hits); - builder.endObject(); - return BytesReference.bytes(builder).utf8ToString(); + return builder; + } + + /** Code copy from SearchHits */ + private static void toXContent( + XContentBuilder builder, Params params, List hits, ElasticJoinExecutor executor) + throws IOException { + builder.startObject(SearchHits.Fields.HITS); + builder.field( + SearchHits.Fields.TOTAL, + ImmutableMap.of("value", hits.size(), "relation", Relation.EQUAL_TO)); + builder.field(SearchHits.Fields.MAX_SCORE, 1.0f); + builder.field(SearchHits.Fields.HITS); + builder.startArray(); + + for (int i = 0; i < hits.size(); i++) { + if (i % 10000 == 0 && !BackOffRetryStrategy.isHealthy()) { + throw new IllegalStateException("Memory circuit break when generating json builder"); + } + toXContent(builder, params, hits.get(i)); } - /** - * Generate string by serializing SearchHits in place without any new HashMap copy - */ - public static XContentBuilder hitsAsStringResultZeroCopy(List results, MetaSearchResult metaResults, - ElasticJoinExecutor executor) throws IOException { - BytesStreamOutput outputStream = new BytesStreamOutput(); - - XContentBuilder builder = XContentFactory.jsonBuilder(outputStream).prettyPrint(); - builder.startObject(); - builder.field("took", metaResults.getTookImMilli()); - builder.field("timed_out", metaResults.isTimedOut()); - builder.field("_shards", ImmutableMap.of( - "total", metaResults.getTotalNumOfShards(), - "successful", metaResults.getSuccessfulShards(), - "failed", metaResults.getFailedShards() - )); - toXContent(builder, EMPTY_PARAMS, results, executor); - builder.endObject(); - - if (!BackOffRetryStrategy.isHealthy(2 * outputStream.size(), executor)) { - throw new IllegalStateException("Memory could be insufficient when sendResponse()."); - } - - return builder; + builder.endArray(); + builder.endObject(); + } + + /** Code copy from SearchHit but only keep fields interested and replace source by sourceMap */ + private static void toXContent(XContentBuilder builder, Params params, SearchHit hit) + throws IOException { + builder.startObject(); + if (hit.getId() != null) { + builder.field("_id", hit.getId()); } - /** - * Code copy from SearchHits - */ - private static void toXContent(XContentBuilder builder, 
Params params, List hits, - ElasticJoinExecutor executor) throws IOException { - builder.startObject(SearchHits.Fields.HITS); - builder.field(SearchHits.Fields.TOTAL, ImmutableMap.of( - "value", hits.size(), - "relation", Relation.EQUAL_TO - )); - builder.field(SearchHits.Fields.MAX_SCORE, 1.0f); - builder.field(SearchHits.Fields.HITS); - builder.startArray(); - - for (int i = 0; i < hits.size(); i++) { - if (i % 10000 == 0 && !BackOffRetryStrategy.isHealthy()) { - throw new IllegalStateException("Memory circuit break when generating json builder"); - } - toXContent(builder, params, hits.get(i)); - } - - builder.endArray(); - builder.endObject(); + if (Float.isNaN(hit.getScore())) { + builder.nullField("_score"); + } else { + builder.field("_score", hit.getScore()); } - /** - * Code copy from SearchHit but only keep fields interested and replace source by sourceMap + /* + * Use sourceMap rather than binary source because source is out-of-date + * and only used when creating a new instance of SearchHit */ - private static void toXContent(XContentBuilder builder, Params params, SearchHit hit) throws IOException { - builder.startObject(); - if (hit.getId() != null) { - builder.field("_id", hit.getId()); - } - - if (Float.isNaN(hit.getScore())) { - builder.nullField("_score"); - } else { - builder.field("_score", hit.getScore()); - } - - /* - * Use sourceMap rather than binary source because source is out-of-date - * and only used when creating a new instance of SearchHit - */ - builder.field("_source", hit.getSourceAsMap()); - builder.endObject(); - } + builder.field("_source", hit.getSourceAsMap()); + builder.endObject(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java index 52d292a2e5..8216feac66 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import java.util.ArrayList; @@ -14,55 +13,56 @@ import org.opensearch.search.SearchHit; import org.opensearch.sql.legacy.domain.Field; -/** - * Created by Eliran on 2/11/2015. - */ +/** Created by Eliran on 2/11/2015. */ public class HashJoinComparisonStructure { - private HashMap>> comparisonIDtoComparisonFields; - private HashMap> comparisonIDtoComparisonHash; + private HashMap>> comparisonIDtoComparisonFields; + private HashMap> comparisonIDtoComparisonHash; - public HashJoinComparisonStructure(List>> t1ToT2FieldsComparisons) { - comparisonIDtoComparisonFields = new HashMap<>(); - comparisonIDtoComparisonHash = new HashMap<>(); - if (t1ToT2FieldsComparisons == null || t1ToT2FieldsComparisons.size() == 0) { - String comparisonId = UUID.randomUUID().toString(); - this.comparisonIDtoComparisonFields.put(comparisonId, new ArrayList>()); - this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); - } - for (List> comparisonFields : t1ToT2FieldsComparisons) { - String comparisonId = UUID.randomUUID().toString(); - //maby from field to List ? 
- this.comparisonIDtoComparisonFields.put(comparisonId, comparisonFields); - this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); - } + public HashJoinComparisonStructure(List>> t1ToT2FieldsComparisons) { + comparisonIDtoComparisonFields = new HashMap<>(); + comparisonIDtoComparisonHash = new HashMap<>(); + if (t1ToT2FieldsComparisons == null || t1ToT2FieldsComparisons.size() == 0) { + String comparisonId = UUID.randomUUID().toString(); + this.comparisonIDtoComparisonFields.put( + comparisonId, new ArrayList>()); + this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); } - - public HashMap>> getComparisons() { - return comparisonIDtoComparisonFields; + for (List> comparisonFields : t1ToT2FieldsComparisons) { + String comparisonId = UUID.randomUUID().toString(); + // maby from field to List ? + this.comparisonIDtoComparisonFields.put(comparisonId, comparisonFields); + this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); } + } - public void insertIntoComparisonHash(String comparisonID, String comparisonKey, SearchHit hit) { - HashMap comparisonHash = this.comparisonIDtoComparisonHash.get(comparisonID); - SearchHitsResult currentSearchHitsResult = comparisonHash.get(comparisonKey); - if (currentSearchHitsResult == null) { - currentSearchHitsResult = new SearchHitsResult(new ArrayList(), false); - comparisonHash.put(comparisonKey, currentSearchHitsResult); - } - currentSearchHitsResult.getSearchHits().add(hit); - } + public HashMap>> getComparisons() { + return comparisonIDtoComparisonFields; + } - public SearchHitsResult searchForMatchingSearchHits(String comparisonID, String comparisonKey) { - HashMap comparisonHash = this.comparisonIDtoComparisonHash.get(comparisonID); - return comparisonHash.get(comparisonKey); + public void insertIntoComparisonHash(String comparisonID, String comparisonKey, SearchHit hit) { + HashMap comparisonHash = + this.comparisonIDtoComparisonHash.get(comparisonID); + SearchHitsResult currentSearchHitsResult = comparisonHash.get(comparisonKey); + if (currentSearchHitsResult == null) { + currentSearchHitsResult = new SearchHitsResult(new ArrayList(), false); + comparisonHash.put(comparisonKey, currentSearchHitsResult); } + currentSearchHitsResult.getSearchHits().add(hit); + } - public List getAllSearchHits() { - List allSearchHits = new ArrayList<>(); + public SearchHitsResult searchForMatchingSearchHits(String comparisonID, String comparisonKey) { + HashMap comparisonHash = + this.comparisonIDtoComparisonHash.get(comparisonID); + return comparisonHash.get(comparisonKey); + } - for (HashMap comparisonHash : this.comparisonIDtoComparisonHash.values()) { - allSearchHits.addAll(comparisonHash.values()); - } - return allSearchHits; - } + public List getAllSearchHits() { + List allSearchHits = new ArrayList<>(); + for (HashMap comparisonHash : + this.comparisonIDtoComparisonHash.values()) { + allSearchHits.addAll(comparisonHash.values()); + } + return allSearchHits; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java index 5703cf2ef5..06a913205d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import 
com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; @@ -34,341 +33,385 @@ import org.opensearch.sql.legacy.query.join.TableInJoinRequestBuilder; import org.opensearch.sql.legacy.query.maker.QueryMaker; -/** - * Created by Eliran on 22/8/2015. - */ +/** Created by Eliran on 22/8/2015. */ public class HashJoinElasticExecutor extends ElasticJoinExecutor { - private HashJoinElasticRequestBuilder requestBuilder; - - - private Client client; - private boolean useQueryTermsFilterOptimization = false; - private final int MAX_RESULTS_FOR_FIRST_TABLE = 100000; - HashJoinComparisonStructure hashJoinComparisonStructure; - private Set alreadyMatched; - - public HashJoinElasticExecutor(Client client, HashJoinElasticRequestBuilder requestBuilder) { - super(requestBuilder); - this.client = client; - this.requestBuilder = requestBuilder; - this.useQueryTermsFilterOptimization = requestBuilder.isUseTermFiltersOptimization(); - this.hashJoinComparisonStructure = new HashJoinComparisonStructure(requestBuilder.getT1ToT2FieldsComparison()); - this.alreadyMatched = new HashSet<>(); + private HashJoinElasticRequestBuilder requestBuilder; + + private Client client; + private boolean useQueryTermsFilterOptimization = false; + private final int MAX_RESULTS_FOR_FIRST_TABLE = 100000; + HashJoinComparisonStructure hashJoinComparisonStructure; + private Set alreadyMatched; + + public HashJoinElasticExecutor(Client client, HashJoinElasticRequestBuilder requestBuilder) { + super(requestBuilder); + this.client = client; + this.requestBuilder = requestBuilder; + this.useQueryTermsFilterOptimization = requestBuilder.isUseTermFiltersOptimization(); + this.hashJoinComparisonStructure = + new HashJoinComparisonStructure(requestBuilder.getT1ToT2FieldsComparison()); + this.alreadyMatched = new HashSet<>(); + } + + public List innerRun() throws IOException, SqlParseException { + + Map>> optimizationTermsFilterStructure = + initOptimizationStructure(); + + updateFirstTableLimitIfNeeded(); + TableInJoinRequestBuilder firstTableRequest = requestBuilder.getFirstTable(); + createKeyToResultsAndFillOptimizationStructure( + optimizationTermsFilterStructure, firstTableRequest); + + TableInJoinRequestBuilder secondTableRequest = requestBuilder.getSecondTable(); + if (needToOptimize(optimizationTermsFilterStructure)) { + updateRequestWithTermsFilter(optimizationTermsFilterStructure, secondTableRequest); } - public List innerRun() throws IOException, SqlParseException { - - Map>> optimizationTermsFilterStructure = initOptimizationStructure(); - - updateFirstTableLimitIfNeeded(); - TableInJoinRequestBuilder firstTableRequest = requestBuilder.getFirstTable(); - createKeyToResultsAndFillOptimizationStructure(optimizationTermsFilterStructure, firstTableRequest); - - TableInJoinRequestBuilder secondTableRequest = requestBuilder.getSecondTable(); - if (needToOptimize(optimizationTermsFilterStructure)) { - updateRequestWithTermsFilter(optimizationTermsFilterStructure, secondTableRequest); - } - - List combinedResult = createCombinedResults(secondTableRequest); - - int currentNumOfResults = combinedResult.size(); - int totalLimit = requestBuilder.getTotalLimit(); - if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN - && currentNumOfResults < totalLimit) { - String t1Alias = requestBuilder.getFirstTable().getAlias(); - String t2Alias = requestBuilder.getSecondTable().getAlias(); - //todo: for each till Limit - addUnmatchedResults(combinedResult, this.hashJoinComparisonStructure.getAllSearchHits(), - 
requestBuilder.getSecondTable().getReturnedFields(), - currentNumOfResults, totalLimit, - t1Alias, - t2Alias); - } - if (firstTableRequest.getOriginalSelect().isOrderdSelect()) { - Collections.sort(combinedResult, new Comparator() { - @Override - public int compare(SearchHit o1, SearchHit o2) { - return o1.docId() - o2.docId(); - } - }); - - } - return combinedResult; + List combinedResult = createCombinedResults(secondTableRequest); + + int currentNumOfResults = combinedResult.size(); + int totalLimit = requestBuilder.getTotalLimit(); + if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN + && currentNumOfResults < totalLimit) { + String t1Alias = requestBuilder.getFirstTable().getAlias(); + String t2Alias = requestBuilder.getSecondTable().getAlias(); + // todo: for each till Limit + addUnmatchedResults( + combinedResult, + this.hashJoinComparisonStructure.getAllSearchHits(), + requestBuilder.getSecondTable().getReturnedFields(), + currentNumOfResults, + totalLimit, + t1Alias, + t2Alias); } - - private Map>> initOptimizationStructure() { - Map>> optimizationTermsFilterStructure = new HashMap<>(); - for (String comparisonId : this.hashJoinComparisonStructure.getComparisons().keySet()) { - optimizationTermsFilterStructure.put(comparisonId, new HashMap>()); - } - return optimizationTermsFilterStructure; - } - - private void updateFirstTableLimitIfNeeded() { - if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN) { - Integer firstTableHintLimit = requestBuilder.getFirstTable().getHintLimit(); - int totalLimit = requestBuilder.getTotalLimit(); - if (firstTableHintLimit == null || firstTableHintLimit > totalLimit) { - requestBuilder.getFirstTable().setHintLimit(totalLimit); + if (firstTableRequest.getOriginalSelect().isOrderdSelect()) { + Collections.sort( + combinedResult, + new Comparator() { + @Override + public int compare(SearchHit o1, SearchHit o2) { + return o1.docId() - o2.docId(); } - } + }); } + return combinedResult; + } - private List createCombinedResults(TableInJoinRequestBuilder secondTableRequest) { - List combinedResult = new ArrayList<>(); - int resultIds = 0; - int totalLimit = this.requestBuilder.getTotalLimit(); - Integer hintLimit = secondTableRequest.getHintLimit(); - SearchResponse searchResponse; - boolean finishedScrolling; - if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { - searchResponse = secondTableRequest.getRequestBuilder().setSize(hintLimit).get(); - finishedScrolling = true; - } else { - searchResponse = secondTableRequest.getRequestBuilder() - .setScroll(new TimeValue(60000)) - .setSize(MAX_RESULTS_ON_ONE_FETCH).get(); - //es5.0 no need to scroll again! 
-// searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) -// .setScroll(new TimeValue(600000)).get(); - finishedScrolling = false; + private Map>> initOptimizationStructure() { + Map>> optimizationTermsFilterStructure = new HashMap<>(); + for (String comparisonId : this.hashJoinComparisonStructure.getComparisons().keySet()) { + optimizationTermsFilterStructure.put(comparisonId, new HashMap>()); + } + return optimizationTermsFilterStructure; + } + + private void updateFirstTableLimitIfNeeded() { + if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN) { + Integer firstTableHintLimit = requestBuilder.getFirstTable().getHintLimit(); + int totalLimit = requestBuilder.getTotalLimit(); + if (firstTableHintLimit == null || firstTableHintLimit > totalLimit) { + requestBuilder.getFirstTable().setHintLimit(totalLimit); + } + } + } + + private List createCombinedResults(TableInJoinRequestBuilder secondTableRequest) { + List combinedResult = new ArrayList<>(); + int resultIds = 0; + int totalLimit = this.requestBuilder.getTotalLimit(); + Integer hintLimit = secondTableRequest.getHintLimit(); + SearchResponse searchResponse; + boolean finishedScrolling; + if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { + searchResponse = secondTableRequest.getRequestBuilder().setSize(hintLimit).get(); + finishedScrolling = true; + } else { + searchResponse = + secondTableRequest + .getRequestBuilder() + .setScroll(new TimeValue(60000)) + .setSize(MAX_RESULTS_ON_ONE_FETCH) + .get(); + // es5.0 no need to scroll again! + // searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) + // .setScroll(new TimeValue(600000)).get(); + finishedScrolling = false; + } + updateMetaSearchResults(searchResponse); + + boolean limitReached = false; + int fetchedSoFarFromSecondTable = 0; + while (!limitReached) { + SearchHit[] secondTableHits = searchResponse.getHits().getHits(); + fetchedSoFarFromSecondTable += secondTableHits.length; + for (SearchHit secondTableHit : secondTableHits) { + if (limitReached) { + break; } - updateMetaSearchResults(searchResponse); - - boolean limitReached = false; - int fetchedSoFarFromSecondTable = 0; - while (!limitReached) { - SearchHit[] secondTableHits = searchResponse.getHits().getHits(); - fetchedSoFarFromSecondTable += secondTableHits.length; - for (SearchHit secondTableHit : secondTableHits) { - if (limitReached) { - break; - } - //todo: need to run on comparisons. for each comparison check if exists and add. - HashMap>> comparisons = - this.hashJoinComparisonStructure.getComparisons(); - - for (Map.Entry>> comparison : comparisons.entrySet()) { - String comparisonID = comparison.getKey(); - List> t1ToT2FieldsComparison = comparison.getValue(); - String key = getComparisonKey(t1ToT2FieldsComparison, secondTableHit, false, null); - - SearchHitsResult searchHitsResult = - this.hashJoinComparisonStructure.searchForMatchingSearchHits(comparisonID, key); - - if (searchHitsResult != null && searchHitsResult.getSearchHits().size() > 0) { - searchHitsResult.setMatchedWithOtherTable(true); - List searchHits = searchHitsResult.getSearchHits(); - for (SearchHit matchingHit : searchHits) { - String combinedId = matchingHit.getId() + "|" + secondTableHit.getId(); - //in order to prevent same matching when using OR on hashJoins. 
- if (this.alreadyMatched.contains(combinedId)) { - continue; - } else { - this.alreadyMatched.add(combinedId); - } - - Map copiedSource = new HashMap(); - copyMaps(copiedSource, secondTableHit.getSourceAsMap()); - onlyReturnedFields(copiedSource, secondTableRequest.getReturnedFields(), - secondTableRequest.getOriginalSelect().isSelectAll()); - - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - matchingHit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(matchingHit.docId(), combinedId, - documentFields, metaFields); - searchHit.sourceRef(matchingHit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(matchingHit.getSourceAsMap()); - String t1Alias = requestBuilder.getFirstTable().getAlias(); - String t2Alias = requestBuilder.getSecondTable().getAlias(); - mergeSourceAndAddAliases(copiedSource, searchHit, t1Alias, t2Alias); - - combinedResult.add(searchHit); - resultIds++; - if (resultIds >= totalLimit) { - limitReached = true; - break; - } - } - } - } - } - if (!finishedScrolling) { - if (secondTableHits.length > 0 && (hintLimit == null || fetchedSoFarFromSecondTable >= hintLimit)) { - searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - } else { - break; - } - } else { + // todo: need to run on comparisons. for each comparison check if exists and add. + HashMap>> comparisons = + this.hashJoinComparisonStructure.getComparisons(); + + for (Map.Entry>> comparison : comparisons.entrySet()) { + String comparisonID = comparison.getKey(); + List> t1ToT2FieldsComparison = comparison.getValue(); + String key = getComparisonKey(t1ToT2FieldsComparison, secondTableHit, false, null); + + SearchHitsResult searchHitsResult = + this.hashJoinComparisonStructure.searchForMatchingSearchHits(comparisonID, key); + + if (searchHitsResult != null && searchHitsResult.getSearchHits().size() > 0) { + searchHitsResult.setMatchedWithOtherTable(true); + List searchHits = searchHitsResult.getSearchHits(); + for (SearchHit matchingHit : searchHits) { + String combinedId = matchingHit.getId() + "|" + secondTableHit.getId(); + // in order to prevent same matching when using OR on hashJoins. + if (this.alreadyMatched.contains(combinedId)) { + continue; + } else { + this.alreadyMatched.add(combinedId); + } + + Map copiedSource = new HashMap(); + copyMaps(copiedSource, secondTableHit.getSourceAsMap()); + onlyReturnedFields( + copiedSource, + secondTableRequest.getReturnedFields(), + secondTableRequest.getOriginalSelect().isSelectAll()); + + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + matchingHit + .getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? 
metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = + new SearchHit(matchingHit.docId(), combinedId, documentFields, metaFields); + searchHit.sourceRef(matchingHit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + searchHit.getSourceAsMap().putAll(matchingHit.getSourceAsMap()); + String t1Alias = requestBuilder.getFirstTable().getAlias(); + String t2Alias = requestBuilder.getSecondTable().getAlias(); + mergeSourceAndAddAliases(copiedSource, searchHit, t1Alias, t2Alias); + + combinedResult.add(searchHit); + resultIds++; + if (resultIds >= totalLimit) { + limitReached = true; break; + } } + } } - return combinedResult; - } - - private void copyMaps(Map into, Map from) { - for (Map.Entry keyAndValue : from.entrySet()) { - into.put(keyAndValue.getKey(), keyAndValue.getValue()); + } + if (!finishedScrolling) { + if (secondTableHits.length > 0 + && (hintLimit == null || fetchedSoFarFromSecondTable >= hintLimit)) { + searchResponse = + client + .prepareSearchScroll(searchResponse.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + } else { + break; } + } else { + break; + } } + return combinedResult; + } - private void createKeyToResultsAndFillOptimizationStructure( - Map>> optimizationTermsFilterStructure, - TableInJoinRequestBuilder firstTableRequest) { - List firstTableHits = fetchAllHits(firstTableRequest); - - int resultIds = 1; - for (SearchHit hit : firstTableHits) { - HashMap>> comparisons = - this.hashJoinComparisonStructure.getComparisons(); - for (Map.Entry>> comparison : comparisons.entrySet()) { - String comparisonID = comparison.getKey(); - List> t1ToT2FieldsComparison = comparison.getValue(); - - String key = getComparisonKey(t1ToT2FieldsComparison, hit, true, - optimizationTermsFilterStructure.get(comparisonID)); - - //int docid , id - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - hit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? 
metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(resultIds, hit.getId(), documentFields - , metaFields); - searchHit.sourceRef(hit.getSourceRef()); - - onlyReturnedFields(searchHit.getSourceAsMap(), firstTableRequest.getReturnedFields(), - firstTableRequest.getOriginalSelect().isSelectAll()); - resultIds++; - this.hashJoinComparisonStructure.insertIntoComparisonHash(comparisonID, key, searchHit); - } - } + private void copyMaps(Map into, Map from) { + for (Map.Entry keyAndValue : from.entrySet()) { + into.put(keyAndValue.getKey(), keyAndValue.getValue()); } - - private List fetchAllHits(TableInJoinRequestBuilder tableInJoinRequest) { - Integer hintLimit = tableInJoinRequest.getHintLimit(); - SearchRequestBuilder requestBuilder = tableInJoinRequest.getRequestBuilder(); - if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { - requestBuilder.setSize(hintLimit); - SearchResponse searchResponse = requestBuilder.get(); - updateMetaSearchResults(searchResponse); - return Arrays.asList(searchResponse.getHits().getHits()); - } - return scrollTillLimit(tableInJoinRequest, hintLimit); + } + + private void createKeyToResultsAndFillOptimizationStructure( + Map>> optimizationTermsFilterStructure, + TableInJoinRequestBuilder firstTableRequest) { + List firstTableHits = fetchAllHits(firstTableRequest); + + int resultIds = 1; + for (SearchHit hit : firstTableHits) { + HashMap>> comparisons = + this.hashJoinComparisonStructure.getComparisons(); + for (Map.Entry>> comparison : comparisons.entrySet()) { + String comparisonID = comparison.getKey(); + List> t1ToT2FieldsComparison = comparison.getValue(); + + String key = + getComparisonKey( + t1ToT2FieldsComparison, + hit, + true, + optimizationTermsFilterStructure.get(comparisonID)); + + // int docid , id + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + hit.getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? 
metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = new SearchHit(resultIds, hit.getId(), documentFields, metaFields); + searchHit.sourceRef(hit.getSourceRef()); + + onlyReturnedFields( + searchHit.getSourceAsMap(), + firstTableRequest.getReturnedFields(), + firstTableRequest.getOriginalSelect().isSelectAll()); + resultIds++; + this.hashJoinComparisonStructure.insertIntoComparisonHash(comparisonID, key, searchHit); + } + } + } + + private List fetchAllHits(TableInJoinRequestBuilder tableInJoinRequest) { + Integer hintLimit = tableInJoinRequest.getHintLimit(); + SearchRequestBuilder requestBuilder = tableInJoinRequest.getRequestBuilder(); + if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { + requestBuilder.setSize(hintLimit); + SearchResponse searchResponse = requestBuilder.get(); + updateMetaSearchResults(searchResponse); + return Arrays.asList(searchResponse.getHits().getHits()); } + return scrollTillLimit(tableInJoinRequest, hintLimit); + } - private List scrollTillLimit(TableInJoinRequestBuilder tableInJoinRequest, Integer hintLimit) { - SearchResponse scrollResp = scrollOneTimeWithMax(client, tableInJoinRequest); + private List scrollTillLimit( + TableInJoinRequestBuilder tableInJoinRequest, Integer hintLimit) { + SearchResponse scrollResp = scrollOneTimeWithMax(client, tableInJoinRequest); - updateMetaSearchResults(scrollResp); - List hitsWithScan = new ArrayList<>(); - int curentNumOfResults = 0; - SearchHit[] hits = scrollResp.getHits().getHits(); + updateMetaSearchResults(scrollResp); + List hitsWithScan = new ArrayList<>(); + int curentNumOfResults = 0; + SearchHit[] hits = scrollResp.getHits().getHits(); - if (hintLimit == null) { - hintLimit = MAX_RESULTS_FOR_FIRST_TABLE; - } - - while (hits.length != 0 && curentNumOfResults < hintLimit) { - curentNumOfResults += hits.length; - Collections.addAll(hitsWithScan, hits); - if (curentNumOfResults >= MAX_RESULTS_FOR_FIRST_TABLE) { - //todo: log or exception? - System.out.println("too many results for first table, stoping at:" + curentNumOfResults); - break; - } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)) - .execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - return hitsWithScan; + if (hintLimit == null) { + hintLimit = MAX_RESULTS_FOR_FIRST_TABLE; } - private boolean needToOptimize(Map>> optimizationTermsFilterStructure) { - if (!useQueryTermsFilterOptimization && optimizationTermsFilterStructure != null - && optimizationTermsFilterStructure.size() > 0) { - return false; - } - boolean allEmpty = true; - for (Map> optimization : optimizationTermsFilterStructure.values()) { - if (optimization.size() > 0) { - allEmpty = false; - break; - } - } - return !allEmpty; + while (hits.length != 0 && curentNumOfResults < hintLimit) { + curentNumOfResults += hits.length; + Collections.addAll(hitsWithScan, hits); + if (curentNumOfResults >= MAX_RESULTS_FOR_FIRST_TABLE) { + // todo: log or exception? 
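+        // Hits beyond MAX_RESULTS_FOR_FIRST_TABLE are dropped: the loop breaks here and no
+        // further scroll pages are fetched for the first table.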
+ System.out.println("too many results for first table, stoping at:" + curentNumOfResults); + break; + } + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - - private void updateRequestWithTermsFilter(Map>> optimizationTermsFilterStructure, - TableInJoinRequestBuilder secondTableRequest) throws SqlParseException { - Select select = secondTableRequest.getOriginalSelect(); - - BoolQueryBuilder orQuery = QueryBuilders.boolQuery(); - for (Map> optimization : optimizationTermsFilterStructure.values()) { - BoolQueryBuilder andQuery = QueryBuilders.boolQuery(); - for (Map.Entry> keyToValues : optimization.entrySet()) { - String fieldName = keyToValues.getKey(); - List values = keyToValues.getValue(); - andQuery.must(QueryBuilders.termsQuery(fieldName, values)); - } - orQuery.should(andQuery); - } - - Where where = select.getWhere(); - - BoolQueryBuilder boolQuery; - if (where != null) { - boolQuery = QueryMaker.explain(where, false); - boolQuery.must(orQuery); - } else { - boolQuery = orQuery; - } - secondTableRequest.getRequestBuilder().setQuery(boolQuery); + return hitsWithScan; + } + + private boolean needToOptimize( + Map>> optimizationTermsFilterStructure) { + if (!useQueryTermsFilterOptimization + && optimizationTermsFilterStructure != null + && optimizationTermsFilterStructure.size() > 0) { + return false; + } + boolean allEmpty = true; + for (Map> optimization : optimizationTermsFilterStructure.values()) { + if (optimization.size() > 0) { + allEmpty = false; + break; + } + } + return !allEmpty; + } + + private void updateRequestWithTermsFilter( + Map>> optimizationTermsFilterStructure, + TableInJoinRequestBuilder secondTableRequest) + throws SqlParseException { + Select select = secondTableRequest.getOriginalSelect(); + + BoolQueryBuilder orQuery = QueryBuilders.boolQuery(); + for (Map> optimization : optimizationTermsFilterStructure.values()) { + BoolQueryBuilder andQuery = QueryBuilders.boolQuery(); + for (Map.Entry> keyToValues : optimization.entrySet()) { + String fieldName = keyToValues.getKey(); + List values = keyToValues.getValue(); + andQuery.must(QueryBuilders.termsQuery(fieldName, values)); + } + orQuery.should(andQuery); } - private String getComparisonKey(List> t1ToT2FieldsComparison, SearchHit hit, - boolean firstTable, Map> optimizationTermsFilterStructure) { - String key = ""; - Map sourceAsMap = hit.getSourceAsMap(); - for (Map.Entry t1ToT2 : t1ToT2FieldsComparison) { - //todo: change to our function find if key contains '.' 
- String name; - if (firstTable) { - name = t1ToT2.getKey().getName(); - } else { - name = t1ToT2.getValue().getName(); - } + Where where = select.getWhere(); - Object data = deepSearchInMap(sourceAsMap, name); - if (firstTable && useQueryTermsFilterOptimization) { - updateOptimizationData(optimizationTermsFilterStructure, data, t1ToT2.getValue().getName()); - } - if (data == null) { - key += "|null|"; - } else { - key += "|" + data.toString() + "|"; - } - } - return key; + BoolQueryBuilder boolQuery; + if (where != null) { + boolQuery = QueryMaker.explain(where, false); + boolQuery.must(orQuery); + } else { + boolQuery = orQuery; } - - private void updateOptimizationData(Map> optimizationTermsFilterStructure, - Object data, String queryOptimizationKey) { - List values = optimizationTermsFilterStructure.get(queryOptimizationKey); - if (values == null) { - values = new ArrayList<>(); - optimizationTermsFilterStructure.put(queryOptimizationKey, values); - } - if (data instanceof String) { - //todo: analyzed or not analyzed check.. - data = ((String) data).toLowerCase(); - } - if (data != null) { - values.add(data); - } + secondTableRequest.getRequestBuilder().setQuery(boolQuery); + } + + private String getComparisonKey( + List> t1ToT2FieldsComparison, + SearchHit hit, + boolean firstTable, + Map> optimizationTermsFilterStructure) { + String key = ""; + Map sourceAsMap = hit.getSourceAsMap(); + for (Map.Entry t1ToT2 : t1ToT2FieldsComparison) { + // todo: change to our function find if key contains '.' + String name; + if (firstTable) { + name = t1ToT2.getKey().getName(); + } else { + name = t1ToT2.getValue().getName(); + } + + Object data = deepSearchInMap(sourceAsMap, name); + if (firstTable && useQueryTermsFilterOptimization) { + updateOptimizationData(optimizationTermsFilterStructure, data, t1ToT2.getValue().getName()); + } + if (data == null) { + key += "|null|"; + } else { + key += "|" + data.toString() + "|"; + } + } + return key; + } + + private void updateOptimizationData( + Map> optimizationTermsFilterStructure, + Object data, + String queryOptimizationKey) { + List values = optimizationTermsFilterStructure.get(queryOptimizationKey); + if (values == null) { + values = new ArrayList<>(); + optimizationTermsFilterStructure.put(queryOptimizationKey, values); + } + if (data instanceof String) { + // todo: analyzed or not analyzed check.. + data = ((String) data).toLowerCase(); + } + if (data != null) { + values.add(data); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java index abdcf05751..a4174b7247 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java @@ -3,64 +3,60 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; -/** - * Created by Eliran on 4/9/2015. - */ +/** Created by Eliran on 4/9/2015. 
*/ public class MetaSearchResult { - private long tookImMilli; - private int totalNumOfShards; - private int successfulShards; - private int failedShards; - private boolean isTimedOut; - - public MetaSearchResult() { - totalNumOfShards = 0; - failedShards = 0; - successfulShards = 0; - isTimedOut = false; - } - - public int getTotalNumOfShards() { - return totalNumOfShards; - } - - public int getSuccessfulShards() { - return successfulShards; - } - - public int getFailedShards() { - return failedShards; - } - - public boolean isTimedOut() { - return isTimedOut; - } - - public long getTookImMilli() { - return tookImMilli; - } - - public void setTookImMilli(long tookImMilli) { - this.tookImMilli = tookImMilli; - } - - public void addFailedShards(int shards) { - this.failedShards += shards; - } - - public void addSuccessfulShards(int shards) { - this.successfulShards += shards; - } - - public void addTotalNumOfShards(int shards) { - this.totalNumOfShards += shards; - } - - public void updateTimeOut(boolean isTimedOut) { - this.isTimedOut = this.isTimedOut || isTimedOut; - } - + private long tookImMilli; + private int totalNumOfShards; + private int successfulShards; + private int failedShards; + private boolean isTimedOut; + + public MetaSearchResult() { + totalNumOfShards = 0; + failedShards = 0; + successfulShards = 0; + isTimedOut = false; + } + + public int getTotalNumOfShards() { + return totalNumOfShards; + } + + public int getSuccessfulShards() { + return successfulShards; + } + + public int getFailedShards() { + return failedShards; + } + + public boolean isTimedOut() { + return isTimedOut; + } + + public long getTookImMilli() { + return tookImMilli; + } + + public void setTookImMilli(long tookImMilli) { + this.tookImMilli = tookImMilli; + } + + public void addFailedShards(int shards) { + this.failedShards += shards; + } + + public void addSuccessfulShards(int shards) { + this.successfulShards += shards; + } + + public void addTotalNumOfShards(int shards) { + this.totalNumOfShards += shards; + } + + public void updateTimeOut(boolean isTimedOut) { + this.isTimedOut = this.isTimedOut || isTimedOut; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java index 83901f1acb..03e16424e7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import java.util.ArrayList; @@ -36,409 +35,453 @@ import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; import org.opensearch.sql.legacy.utils.Util; - -/** - * Created by Eliran on 26/8/2016. - */ +/** Created by Eliran on 26/8/2016. 
*/ public class MinusExecutor implements ElasticHitsExecutor { - private Client client; - private MultiQueryRequestBuilder builder; - private SearchHits minusHits; - private boolean useTermsOptimization; - private boolean termsOptimizationWithToLower; - private boolean useScrolling; - private int maxDocsToFetchOnFirstTable; - private int maxDocsToFetchOnSecondTable; - private int maxDocsToFetchOnEachScrollShard; - private String[] fieldsOrderFirstTable; - private String[] fieldsOrderSecondTable; - private String seperator; - - public MinusExecutor(Client client, MultiQueryRequestBuilder builder) { - this.client = client; - this.builder = builder; - this.useTermsOptimization = false; - this.termsOptimizationWithToLower = false; - this.useScrolling = false; - parseHintsIfAny(builder.getOriginalSelect(true).getHints()); - fillFieldsOrder(); - seperator = UUID.randomUUID().toString(); + private Client client; + private MultiQueryRequestBuilder builder; + private SearchHits minusHits; + private boolean useTermsOptimization; + private boolean termsOptimizationWithToLower; + private boolean useScrolling; + private int maxDocsToFetchOnFirstTable; + private int maxDocsToFetchOnSecondTable; + private int maxDocsToFetchOnEachScrollShard; + private String[] fieldsOrderFirstTable; + private String[] fieldsOrderSecondTable; + private String seperator; + + public MinusExecutor(Client client, MultiQueryRequestBuilder builder) { + this.client = client; + this.builder = builder; + this.useTermsOptimization = false; + this.termsOptimizationWithToLower = false; + this.useScrolling = false; + parseHintsIfAny(builder.getOriginalSelect(true).getHints()); + fillFieldsOrder(); + seperator = UUID.randomUUID().toString(); + } + + @Override + public void run() throws SqlParseException { + if (this.useTermsOptimization && this.fieldsOrderFirstTable.length != 1) { + throw new SqlParseException( + "Terms optimization failed: terms optimization for minus execution is supported with one" + + " field"); } - - @Override - public void run() throws SqlParseException { - if (this.useTermsOptimization && this.fieldsOrderFirstTable.length != 1) { - throw new SqlParseException( - "Terms optimization failed: terms optimization for minus execution is supported with one field"); - } - if (this.useTermsOptimization && !this.useScrolling) { - throw new SqlParseException( - "Terms optimization failed: using scrolling is required for terms optimization"); - } - if (!this.useScrolling || !this.useTermsOptimization) { - Set comperableHitResults; - if (!this.useScrolling) { - //1. get results from first search , put in set - //2. get reults from second search - //2.1 for each result remove from set - comperableHitResults = simpleOneTimeQueryEach(); - } else { - //if scrolling - //1. get all results in scrolls (till some limit) . put on set - //2. scroll on second table - //3. on each scroll result remove items from set - comperableHitResults = runWithScrollings(); - } - fillMinusHitsFromResults(comperableHitResults); - return; - } else { - //if scrolling and optimization - // 0. save the original second table where , init set - // 1. 
on each scroll on first table , create miniSet - //1.1 build where from all results (terms filter) , and run query - //1.1.1 on each result remove from miniSet - //1.1.2 add all results left from miniset to bigset - Select firstSelect = this.builder.getOriginalSelect(true); - MinusOneFieldAndOptimizationResult optimizationResult = - runWithScrollingAndAddFilter(fieldsOrderFirstTable[0], fieldsOrderSecondTable[0]); - String fieldName = getFieldName(firstSelect.getFields().get(0)); - Set results = optimizationResult.getFieldValues(); - SearchHit someHit = optimizationResult.getSomeHit(); - fillMinusHitsFromOneField(fieldName, results, someHit); - + if (this.useTermsOptimization && !this.useScrolling) { + throw new SqlParseException( + "Terms optimization failed: using scrolling is required for terms optimization"); + } + if (!this.useScrolling || !this.useTermsOptimization) { + Set comperableHitResults; + if (!this.useScrolling) { + // 1. get results from first search , put in set + // 2. get reults from second search + // 2.1 for each result remove from set + comperableHitResults = simpleOneTimeQueryEach(); + } else { + // if scrolling + // 1. get all results in scrolls (till some limit) . put on set + // 2. scroll on second table + // 3. on each scroll result remove items from set + comperableHitResults = runWithScrollings(); + } + fillMinusHitsFromResults(comperableHitResults); + return; + } else { + // if scrolling and optimization + // 0. save the original second table where , init set + // 1. on each scroll on first table , create miniSet + // 1.1 build where from all results (terms filter) , and run query + // 1.1.1 on each result remove from miniSet + // 1.1.2 add all results left from miniset to bigset + Select firstSelect = this.builder.getOriginalSelect(true); + MinusOneFieldAndOptimizationResult optimizationResult = + runWithScrollingAndAddFilter(fieldsOrderFirstTable[0], fieldsOrderSecondTable[0]); + String fieldName = getFieldName(firstSelect.getFields().get(0)); + Set results = optimizationResult.getFieldValues(); + SearchHit someHit = optimizationResult.getSomeHit(); + fillMinusHitsFromOneField(fieldName, results, someHit); + } + } + + @Override + public SearchHits getHits() { + return this.minusHits; + } + + private void fillMinusHitsFromOneField( + String fieldName, Set fieldValues, SearchHit someHit) { + List minusHitsList = new ArrayList<>(); + int currentId = 1; + for (Object result : fieldValues) { + Map fields = new HashMap<>(); + ArrayList values = new ArrayList<>(); + values.add(result); + fields.put(fieldName, new DocumentField(fieldName, values)); + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + someHit + .getFields() + .forEach( + (field, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(field) + ? 
metaFields + : documentFields) + .put(field, docField)); + SearchHit searchHit = new SearchHit(currentId, currentId + "", documentFields, metaFields); + searchHit.sourceRef(someHit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + Map sourceAsMap = new HashMap<>(); + sourceAsMap.put(fieldName, result); + searchHit.getSourceAsMap().putAll(sourceAsMap); + currentId++; + minusHitsList.add(searchHit); + } + int totalSize = currentId - 1; + SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); + this.minusHits = + new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + } + + private void fillMinusHitsFromResults(Set comperableHitResults) { + int currentId = 1; + List minusHitsList = new ArrayList<>(); + for (ComperableHitResult result : comperableHitResults) { + ArrayList values = new ArrayList<>(); + values.add(result); + SearchHit originalHit = result.getOriginalHit(); + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + originalHit + .getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = + new SearchHit(currentId, originalHit.getId(), documentFields, metaFields); + searchHit.sourceRef(originalHit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + Map sourceAsMap = result.getFlattenMap(); + for (Map.Entry entry : this.builder.getFirstTableFieldToAlias().entrySet()) { + if (sourceAsMap.containsKey(entry.getKey())) { + Object value = sourceAsMap.get(entry.getKey()); + sourceAsMap.remove(entry.getKey()); + sourceAsMap.put(entry.getValue(), value); } + } + searchHit.getSourceAsMap().putAll(sourceAsMap); + currentId++; + minusHitsList.add(searchHit); } - - - @Override - public SearchHits getHits() { - return this.minusHits; + int totalSize = currentId - 1; + SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); + this.minusHits = + new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + } + + private Set runWithScrollings() { + + SearchResponse scrollResp = + ElasticUtils.scrollOneTimeWithHits( + this.client, + this.builder.getFirstSearchRequest(), + builder.getOriginalSelect(true), + this.maxDocsToFetchOnEachScrollShard); + Set results = new HashSet<>(); + + SearchHit[] hits = scrollResp.getHits().getHits(); + if (hits == null || hits.length == 0) { + return new HashSet<>(); } - - private void fillMinusHitsFromOneField(String fieldName, Set fieldValues, SearchHit someHit) { - List minusHitsList = new ArrayList<>(); - int currentId = 1; - for (Object result : fieldValues) { - Map fields = new HashMap<>(); - ArrayList values = new ArrayList<>(); - values.add(result); - fields.put(fieldName, new DocumentField(fieldName, values)); - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - someHit.getFields().forEach((field, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(field) ? 
metaFields : documentFields).put(field, docField)); - SearchHit searchHit = new SearchHit(currentId, currentId + "", - documentFields, metaFields); - searchHit.sourceRef(someHit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - Map sourceAsMap = new HashMap<>(); - sourceAsMap.put(fieldName, result); - searchHit.getSourceAsMap().putAll(sourceAsMap); - currentId++; - minusHitsList.add(searchHit); - } - int totalSize = currentId - 1; - SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); - this.minusHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + int totalDocsFetchedFromFirstTable = 0; + + // fetch from first table . fill set. + while (hits != null && hits.length != 0) { + totalDocsFetchedFromFirstTable += hits.length; + fillComperableSetFromHits(this.fieldsOrderFirstTable, hits, results); + if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { + break; + } + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - - private void fillMinusHitsFromResults(Set comperableHitResults) { - int currentId = 1; - List minusHitsList = new ArrayList<>(); - for (ComperableHitResult result : comperableHitResults) { - ArrayList values = new ArrayList<>(); - values.add(result); - SearchHit originalHit = result.getOriginalHit(); - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - originalHit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(currentId, originalHit.getId(), - documentFields, metaFields); - searchHit.sourceRef(originalHit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - Map sourceAsMap = result.getFlattenMap(); - for (Map.Entry entry : this.builder.getFirstTableFieldToAlias().entrySet()) { - if (sourceAsMap.containsKey(entry.getKey())) { - Object value = sourceAsMap.get(entry.getKey()); - sourceAsMap.remove(entry.getKey()); - sourceAsMap.put(entry.getValue(), value); - } - } - - searchHit.getSourceAsMap().putAll(sourceAsMap); - currentId++; - minusHitsList.add(searchHit); - } - int totalSize = currentId - 1; - SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); - this.minusHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + scrollResp = + ElasticUtils.scrollOneTimeWithHits( + this.client, + this.builder.getSecondSearchRequest(), + builder.getOriginalSelect(false), + this.maxDocsToFetchOnEachScrollShard); + + hits = scrollResp.getHits().getHits(); + if (hits == null || hits.length == 0) { + return results; } - - private Set runWithScrollings() { - - SearchResponse scrollResp = ElasticUtils.scrollOneTimeWithHits(this.client, - this.builder.getFirstSearchRequest(), - builder.getOriginalSelect(true), this.maxDocsToFetchOnEachScrollShard); - Set results = new HashSet<>(); - - SearchHit[] hits = scrollResp.getHits().getHits(); - if (hits == null || hits.length == 0) { - return new HashSet<>(); - } - int totalDocsFetchedFromFirstTable = 0; - - //fetch from first table . fill set. 
- while (hits != null && hits.length != 0) { - totalDocsFetchedFromFirstTable += hits.length; - fillComperableSetFromHits(this.fieldsOrderFirstTable, hits, results); - if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { - break; - } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - scrollResp = ElasticUtils.scrollOneTimeWithHits(this.client, this.builder.getSecondSearchRequest(), - builder.getOriginalSelect(false), this.maxDocsToFetchOnEachScrollShard); - - - hits = scrollResp.getHits().getHits(); - if (hits == null || hits.length == 0) { - return results; - } - int totalDocsFetchedFromSecondTable = 0; - while (hits != null && hits.length != 0) { - totalDocsFetchedFromSecondTable += hits.length; - removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, results, hits); - if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { - break; - } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - - return results; + int totalDocsFetchedFromSecondTable = 0; + while (hits != null && hits.length != 0) { + totalDocsFetchedFromSecondTable += hits.length; + removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, results, hits); + if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { + break; + } + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - private Set simpleOneTimeQueryEach() { - SearchHit[] firstTableHits = this.builder.getFirstSearchRequest().get().getHits().getHits(); - if (firstTableHits == null || firstTableHits.length == 0) { - return new HashSet<>(); - } + return results; + } - Set result = new HashSet<>(); - fillComperableSetFromHits(this.fieldsOrderFirstTable, firstTableHits, result); - SearchHit[] secondTableHits = this.builder.getSecondSearchRequest().get().getHits().getHits(); - if (secondTableHits == null || secondTableHits.length == 0) { - return result; - } - removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, result, secondTableHits); - return result; + private Set simpleOneTimeQueryEach() { + SearchHit[] firstTableHits = this.builder.getFirstSearchRequest().get().getHits().getHits(); + if (firstTableHits == null || firstTableHits.length == 0) { + return new HashSet<>(); } - private void removeValuesFromSetAccordingToHits(String[] fieldsOrder, - Set set, SearchHit[] hits) { - for (SearchHit hit : hits) { - ComperableHitResult comperableHitResult = new ComperableHitResult(hit, fieldsOrder, this.seperator); - if (!comperableHitResult.isAllNull()) { - set.remove(comperableHitResult); - } - } + Set result = new HashSet<>(); + fillComperableSetFromHits(this.fieldsOrderFirstTable, firstTableHits, result); + SearchHit[] secondTableHits = this.builder.getSecondSearchRequest().get().getHits().getHits(); + if (secondTableHits == null || secondTableHits.length == 0) { + return result; } - - private void fillComperableSetFromHits(String[] fieldsOrder, SearchHit[] hits, Set setToFill) { - for (SearchHit hit : hits) { - ComperableHitResult comperableHitResult = new ComperableHitResult(hit, fieldsOrder, this.seperator); - if (!comperableHitResult.isAllNull()) { - setToFill.add(comperableHitResult); - } - } + 
removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, result, secondTableHits); + return result; + } + + private void removeValuesFromSetAccordingToHits( + String[] fieldsOrder, Set set, SearchHit[] hits) { + for (SearchHit hit : hits) { + ComperableHitResult comperableHitResult = + new ComperableHitResult(hit, fieldsOrder, this.seperator); + if (!comperableHitResult.isAllNull()) { + set.remove(comperableHitResult); + } } - - private String getFieldName(Field field) { - String alias = field.getAlias(); - if (alias != null && !alias.isEmpty()) { - return alias; - } - return field.getName(); + } + + private void fillComperableSetFromHits( + String[] fieldsOrder, SearchHit[] hits, Set setToFill) { + for (SearchHit hit : hits) { + ComperableHitResult comperableHitResult = + new ComperableHitResult(hit, fieldsOrder, this.seperator); + if (!comperableHitResult.isAllNull()) { + setToFill.add(comperableHitResult); + } } + } - private boolean checkIfOnlyOneField(Select firstSelect, Select secondSelect) { - return firstSelect.getFields().size() == 1 && secondSelect.getFields().size() == 1; + private String getFieldName(Field field) { + String alias = field.getAlias(); + if (alias != null && !alias.isEmpty()) { + return alias; } - - - // 0. save the original second table where , init set - // 1. on each scroll on first table , create miniSet - //1.1 build where from all results (terms filter) , and run query - //1.1.1 on each result remove from miniSet - //1.1.2 add all results left from miniset to bigset - private MinusOneFieldAndOptimizationResult runWithScrollingAndAddFilter(String firstFieldName, - String secondFieldName) - throws SqlParseException { - SearchResponse scrollResp = ElasticUtils.scrollOneTimeWithHits(this.client, - this.builder.getFirstSearchRequest(), - builder.getOriginalSelect(true), this.maxDocsToFetchOnEachScrollShard); - Set results = new HashSet<>(); - int currentNumOfResults = 0; - SearchHit[] hits = scrollResp.getHits().getHits(); - SearchHit someHit = null; - if (hits.length != 0) { - //we need some hit for creating InnerResults. 
- someHit = hits[0]; - } - int totalDocsFetchedFromFirstTable = 0; - int totalDocsFetchedFromSecondTable = 0; - Where originalWhereSecondTable = this.builder.getOriginalSelect(false).getWhere(); - while (hits.length != 0) { - totalDocsFetchedFromFirstTable += hits.length; - Set currentSetFromResults = new HashSet<>(); - fillSetFromHits(firstFieldName, hits, currentSetFromResults); - //fetch from second - Select secondQuerySelect = this.builder.getOriginalSelect(false); - Where where = createWhereWithOrigianlAndTermsFilter(secondFieldName, originalWhereSecondTable, - currentSetFromResults); - secondQuerySelect.setWhere(where); - DefaultQueryAction queryAction = new DefaultQueryAction(this.client, secondQuerySelect); - queryAction.explain(); - if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { - break; - } - SearchResponse responseForSecondTable = ElasticUtils.scrollOneTimeWithHits(this.client, - queryAction.getRequestBuilder(), secondQuerySelect, this.maxDocsToFetchOnEachScrollShard); - SearchHits secondQuerySearchHits = responseForSecondTable.getHits(); - - SearchHit[] secondQueryHits = secondQuerySearchHits.getHits(); - while (secondQueryHits.length > 0) { - totalDocsFetchedFromSecondTable += secondQueryHits.length; - removeValuesFromSetAccordingToHits(secondFieldName, currentSetFromResults, secondQueryHits); - if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { - break; - } - responseForSecondTable = client.prepareSearchScroll(responseForSecondTable.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - secondQueryHits = responseForSecondTable.getHits().getHits(); - } - results.addAll(currentSetFromResults); - if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { - System.out.println("too many results for first table, stoping at:" + totalDocsFetchedFromFirstTable); - break; - } - - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - return new MinusOneFieldAndOptimizationResult(results, someHit); - - + return field.getName(); + } + + private boolean checkIfOnlyOneField(Select firstSelect, Select secondSelect) { + return firstSelect.getFields().size() == 1 && secondSelect.getFields().size() == 1; + } + + // 0. save the original second table where , init set + // 1. on each scroll on first table , create miniSet + // 1.1 build where from all results (terms filter) , and run query + // 1.1.1 on each result remove from miniSet + // 1.1.2 add all results left from miniset to bigset + private MinusOneFieldAndOptimizationResult runWithScrollingAndAddFilter( + String firstFieldName, String secondFieldName) throws SqlParseException { + SearchResponse scrollResp = + ElasticUtils.scrollOneTimeWithHits( + this.client, + this.builder.getFirstSearchRequest(), + builder.getOriginalSelect(true), + this.maxDocsToFetchOnEachScrollShard); + Set results = new HashSet<>(); + int currentNumOfResults = 0; + SearchHit[] hits = scrollResp.getHits().getHits(); + SearchHit someHit = null; + if (hits.length != 0) { + // we need some hit for creating InnerResults. 
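The numbered comment above summarizes the scrolling optimization: each scroll page of first-table values becomes a mini set, an IN_TERMS filter built from that page restricts the second-table query, the matches that come back are removed from the mini set, and whatever remains is added to the final result. A hedged sketch of that loop, with in-memory stand-ins for the scroll pages and the terms-filtered query (names and data are illustrative, not the executor's actual code):

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;

public class BatchedMinusSketch {

  // Each inner list plays the role of one scroll page of first-table values;
  // the Function plays the role of the second-table query restricted by an
  // IN_TERMS filter built from the current page.
  static Set<Object> minus(
      List<List<Object>> firstTablePages,
      Function<Set<Object>, Set<Object>> secondTableInTermsQuery) {
    Set<Object> results = new HashSet<>();
    for (List<Object> page : firstTablePages) {
      Set<Object> miniSet = new HashSet<>(page);                     // 1. mini set from this page
      Set<Object> matched = secondTableInTermsQuery.apply(miniSet);  // 1.1 terms-filtered query
      miniSet.removeAll(matched);                                    // 1.1.1 drop values the second table has
      results.addAll(miniSet);                                       // 1.1.2 keep what is left
    }
    return results;
  }

  public static void main(String[] args) {
    Set<Object> secondTable = Set.of(2, 4, 6);
    Set<Object> diff =
        minus(
            List.of(List.of(1, 2, 3), List.of(4, 5)),
            candidates -> {
              Set<Object> hit = new HashSet<>(candidates);
              hit.retainAll(secondTable); // what the terms query would return
              return hit;
            });
    System.out.println(diff); // e.g. [1, 3, 5] (HashSet order not guaranteed)
  }
}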
+ someHit = hits[0]; } - - private void removeValuesFromSetAccordingToHits(String fieldName, Set setToRemoveFrom, SearchHit[] hits) { - for (SearchHit hit : hits) { - Object fieldValue = getFieldValue(hit, fieldName); - if (fieldValue != null) { - if (setToRemoveFrom.contains(fieldValue)) { - setToRemoveFrom.remove(fieldValue); - } - } + int totalDocsFetchedFromFirstTable = 0; + int totalDocsFetchedFromSecondTable = 0; + Where originalWhereSecondTable = this.builder.getOriginalSelect(false).getWhere(); + while (hits.length != 0) { + totalDocsFetchedFromFirstTable += hits.length; + Set currentSetFromResults = new HashSet<>(); + fillSetFromHits(firstFieldName, hits, currentSetFromResults); + // fetch from second + Select secondQuerySelect = this.builder.getOriginalSelect(false); + Where where = + createWhereWithOrigianlAndTermsFilter( + secondFieldName, originalWhereSecondTable, currentSetFromResults); + secondQuerySelect.setWhere(where); + DefaultQueryAction queryAction = new DefaultQueryAction(this.client, secondQuerySelect); + queryAction.explain(); + if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { + break; + } + SearchResponse responseForSecondTable = + ElasticUtils.scrollOneTimeWithHits( + this.client, + queryAction.getRequestBuilder(), + secondQuerySelect, + this.maxDocsToFetchOnEachScrollShard); + SearchHits secondQuerySearchHits = responseForSecondTable.getHits(); + + SearchHit[] secondQueryHits = secondQuerySearchHits.getHits(); + while (secondQueryHits.length > 0) { + totalDocsFetchedFromSecondTable += secondQueryHits.length; + removeValuesFromSetAccordingToHits(secondFieldName, currentSetFromResults, secondQueryHits); + if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { + break; } + responseForSecondTable = + client + .prepareSearchScroll(responseForSecondTable.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + secondQueryHits = responseForSecondTable.getHits().getHits(); + } + results.addAll(currentSetFromResults); + if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { + System.out.println( + "too many results for first table, stoping at:" + totalDocsFetchedFromFirstTable); + break; + } + + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - - private void fillSetFromHits(String fieldName, SearchHit[] hits, Set setToFill) { - for (SearchHit hit : hits) { - Object fieldValue = getFieldValue(hit, fieldName); - if (fieldValue != null) { - setToFill.add(fieldValue); - } + return new MinusOneFieldAndOptimizationResult(results, someHit); + } + + private void removeValuesFromSetAccordingToHits( + String fieldName, Set setToRemoveFrom, SearchHit[] hits) { + for (SearchHit hit : hits) { + Object fieldValue = getFieldValue(hit, fieldName); + if (fieldValue != null) { + if (setToRemoveFrom.contains(fieldValue)) { + setToRemoveFrom.remove(fieldValue); } + } } - - private Where createWhereWithOrigianlAndTermsFilter(String secondFieldName, Where originalWhereSecondTable, - Set currentSetFromResults) throws SqlParseException { - Where where = Where.newInstance(); - where.setConn(Where.CONN.AND); - where.addWhere(originalWhereSecondTable); - where.addWhere(buildTermsFilterFromResults(currentSetFromResults, secondFieldName)); - return where; + } + + private void fillSetFromHits(String fieldName, SearchHit[] hits, Set setToFill) { + for (SearchHit hit : hits) { + Object 
fieldValue = getFieldValue(hit, fieldName); + if (fieldValue != null) { + setToFill.add(fieldValue); + } } - - private Where buildTermsFilterFromResults(Set results, String fieldName) throws SqlParseException { - return new Condition(Where.CONN.AND, fieldName, null, Condition.OPERATOR.IN_TERMS, results.toArray(), null); + } + + private Where createWhereWithOrigianlAndTermsFilter( + String secondFieldName, Where originalWhereSecondTable, Set currentSetFromResults) + throws SqlParseException { + Where where = Where.newInstance(); + where.setConn(Where.CONN.AND); + where.addWhere(originalWhereSecondTable); + where.addWhere(buildTermsFilterFromResults(currentSetFromResults, secondFieldName)); + return where; + } + + private Where buildTermsFilterFromResults(Set results, String fieldName) + throws SqlParseException { + return new Condition( + Where.CONN.AND, fieldName, null, Condition.OPERATOR.IN_TERMS, results.toArray(), null); + } + + private Object getFieldValue(SearchHit hit, String fieldName) { + Map sourceAsMap = hit.getSourceAsMap(); + if (fieldName.contains(".")) { + String[] split = fieldName.split("\\."); + return Util.searchPathInMap(sourceAsMap, split); + } else if (sourceAsMap.containsKey(fieldName)) { + return sourceAsMap.get(fieldName); } - - private Object getFieldValue(SearchHit hit, String fieldName) { - Map sourceAsMap = hit.getSourceAsMap(); - if (fieldName.contains(".")) { - String[] split = fieldName.split("\\."); - return Util.searchPathInMap(sourceAsMap, split); - } else if (sourceAsMap.containsKey(fieldName)) { - return sourceAsMap.get(fieldName); - } - return null; + return null; + } + + private void fillFieldsOrder() { + List fieldsOrAliases = new ArrayList<>(); + Map firstTableFieldToAlias = this.builder.getFirstTableFieldToAlias(); + List firstTableFields = this.builder.getOriginalSelect(true).getFields(); + + for (Field field : firstTableFields) { + if (firstTableFieldToAlias.containsKey(field.getName())) { + fieldsOrAliases.add(field.getAlias()); + } else { + fieldsOrAliases.add(field.getName()); + } } - - private void fillFieldsOrder() { - List fieldsOrAliases = new ArrayList<>(); - Map firstTableFieldToAlias = this.builder.getFirstTableFieldToAlias(); - List firstTableFields = this.builder.getOriginalSelect(true).getFields(); - - for (Field field : firstTableFields) { - if (firstTableFieldToAlias.containsKey(field.getName())) { - fieldsOrAliases.add(field.getAlias()); - } else { - fieldsOrAliases.add(field.getName()); - } - } - Collections.sort(fieldsOrAliases); - - int fieldsSize = fieldsOrAliases.size(); - this.fieldsOrderFirstTable = new String[fieldsSize]; - fillFieldsArray(fieldsOrAliases, firstTableFieldToAlias, this.fieldsOrderFirstTable); - this.fieldsOrderSecondTable = new String[fieldsSize]; - fillFieldsArray(fieldsOrAliases, this.builder.getSecondTableFieldToAlias(), this.fieldsOrderSecondTable); + Collections.sort(fieldsOrAliases); + + int fieldsSize = fieldsOrAliases.size(); + this.fieldsOrderFirstTable = new String[fieldsSize]; + fillFieldsArray(fieldsOrAliases, firstTableFieldToAlias, this.fieldsOrderFirstTable); + this.fieldsOrderSecondTable = new String[fieldsSize]; + fillFieldsArray( + fieldsOrAliases, this.builder.getSecondTableFieldToAlias(), this.fieldsOrderSecondTable); + } + + private void fillFieldsArray( + List fieldsOrAliases, Map fieldsToAlias, String[] fields) { + Map aliasToField = inverseMap(fieldsToAlias); + for (int i = 0; i < fields.length; i++) { + String field = fieldsOrAliases.get(i); + if (aliasToField.containsKey(field)) { + 
field = aliasToField.get(field); + } + fields[i] = field; } + } - private void fillFieldsArray(List fieldsOrAliases, Map fieldsToAlias, String[] fields) { - Map aliasToField = inverseMap(fieldsToAlias); - for (int i = 0; i < fields.length; i++) { - String field = fieldsOrAliases.get(i); - if (aliasToField.containsKey(field)) { - field = aliasToField.get(field); - } - fields[i] = field; - } + private Map inverseMap(Map mapToInverse) { + Map inversedMap = new HashMap<>(); + for (Map.Entry entry : mapToInverse.entrySet()) { + inversedMap.put(entry.getValue(), entry.getKey()); } + return inversedMap; + } - private Map inverseMap(Map mapToInverse) { - Map inversedMap = new HashMap<>(); - for (Map.Entry entry : mapToInverse.entrySet()) { - inversedMap.put(entry.getValue(), entry.getKey()); - } - return inversedMap; + private void parseHintsIfAny(List hints) { + if (hints == null) { + return; } - - private void parseHintsIfAny(List hints) { - if (hints == null) { - return; - } - for (Hint hint : hints) { - if (hint.getType() == HintType.MINUS_USE_TERMS_OPTIMIZATION) { - Object[] params = hint.getParams(); - if (params != null && params.length == 1) { - this.termsOptimizationWithToLower = (boolean) params[0]; - } - } else if (hint.getType() == HintType.MINUS_FETCH_AND_RESULT_LIMITS) { - Object[] params = hint.getParams(); - this.useScrolling = true; - this.maxDocsToFetchOnFirstTable = (int) params[0]; - this.maxDocsToFetchOnSecondTable = (int) params[1]; - this.maxDocsToFetchOnEachScrollShard = (int) params[2]; - } + for (Hint hint : hints) { + if (hint.getType() == HintType.MINUS_USE_TERMS_OPTIMIZATION) { + Object[] params = hint.getParams(); + if (params != null && params.length == 1) { + this.termsOptimizationWithToLower = (boolean) params[0]; } + } else if (hint.getType() == HintType.MINUS_FETCH_AND_RESULT_LIMITS) { + Object[] params = hint.getParams(); + this.useScrolling = true; + this.maxDocsToFetchOnFirstTable = (int) params[0]; + this.maxDocsToFetchOnSecondTable = (int) params[1]; + this.maxDocsToFetchOnEachScrollShard = (int) params[2]; + } } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java index 3b4696bc1e..3d7206ab13 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java @@ -3,30 +3,26 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import java.util.Set; import org.opensearch.search.SearchHit; - -/** - * Created by Eliran on 26/8/2016. - */ +/** Created by Eliran on 26/8/2016. 
*/ class MinusOneFieldAndOptimizationResult { - private Set fieldValues; - private SearchHit someHit; + private Set fieldValues; + private SearchHit someHit; - MinusOneFieldAndOptimizationResult(Set fieldValues, SearchHit someHit) { - this.fieldValues = fieldValues; - this.someHit = someHit; - } + MinusOneFieldAndOptimizationResult(Set fieldValues, SearchHit someHit) { + this.fieldValues = fieldValues; + this.someHit = someHit; + } - public Set getFieldValues() { - return fieldValues; - } + public Set getFieldValues() { + return fieldValues; + } - public SearchHit getSomeHit() { - return someHit; - } + public SearchHit getSomeHit() { + return someHit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java index 239bc98772..03c6958076 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import com.alibaba.druid.sql.ast.statement.SQLUnionOperator; @@ -12,20 +11,19 @@ import org.opensearch.sql.legacy.executor.ElasticHitsExecutor; import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; -/** - * Created by Eliran on 21/8/2016. - */ +/** Created by Eliran on 21/8/2016. */ public class MultiRequestExecutorFactory { - public static ElasticHitsExecutor createExecutor(Client client, MultiQueryRequestBuilder builder) { - SQLUnionOperator relation = builder.getRelation(); - switch (relation) { - case UNION_ALL: - case UNION: - return new UnionExecutor(client, builder); - case MINUS: - return new MinusExecutor(client, builder); - default: - throw new SemanticAnalysisException("Unsupported operator: " + relation); - } + public static ElasticHitsExecutor createExecutor( + Client client, MultiQueryRequestBuilder builder) { + SQLUnionOperator relation = builder.getRelation(); + switch (relation) { + case UNION_ALL: + case UNION: + return new UnionExecutor(client, builder); + case MINUS: + return new MinusExecutor(client, builder); + default: + throw new SemanticAnalysisException("Unsupported operator: " + relation); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java index a858d99d3f..3a9ac5a66d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java @@ -3,21 +3,18 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core; - import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; -/** - * The definition of the Expression. - */ +/** The definition of the Expression. */ public interface Expression { - /** - * Evaluate the result on the BindingTuple context. - * @param tuple BindingTuple - * @return ExprValue - */ - ExprValue valueOf(BindingTuple tuple); + /** + * Evaluate the result on the BindingTuple context. 
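To make the Expression contract concrete, the sketch below composes add(ref("age"), literal(1)) from the factory methods defined later in this change and evaluates it against a BindingTuple. How the tuple is built is not shown in this diff, so it is taken as a parameter; treat this as an assumption-laden illustration rather than project code.

import static org.opensearch.sql.legacy.expression.core.operator.ScalarOperation.ADD;

import java.util.Arrays;
import org.opensearch.sql.legacy.expression.core.Expression;
import org.opensearch.sql.legacy.expression.core.ExpressionFactory;
import org.opensearch.sql.legacy.expression.domain.BindingTuple;
import org.opensearch.sql.legacy.expression.model.ExprValueFactory;
import org.opensearch.sql.legacy.expression.model.ExprValueUtils;

class ExpressionSketch {
  // Evaluates age + 1 for the row represented by the given tuple.
  static int agePlusOne(BindingTuple tuple) {
    Expression expr =
        ExpressionFactory.of(
            ADD,
            Arrays.asList(
                ExpressionFactory.ref("age"),
                ExpressionFactory.literal(ExprValueFactory.integerValue(1))));
    return ExprValueUtils.getIntegerValue(expr.valueOf(tuple));
  }
}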
+ * + * @param tuple BindingTuple + * @return ExprValue + */ + ExprValue valueOf(BindingTuple tuple); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java index cf5fd4627f..1df81e34b3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core; - import static org.opensearch.sql.legacy.expression.core.operator.ScalarOperation.ABS; import static org.opensearch.sql.legacy.expression.core.operator.ScalarOperation.ACOS; import static org.opensearch.sql.legacy.expression.core.operator.ScalarOperation.ADD; @@ -38,89 +36,80 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; - -/** - * The definition of Expression factory. - */ +/** The definition of Expression factory. */ public class ExpressionFactory { - private static final Map operationExpressionBuilderMap = - new ImmutableMap.Builder() - .put(ADD, ArithmeticFunctionFactory.add()) - .put(SUBTRACT, ArithmeticFunctionFactory.subtract()) - .put(MULTIPLY, ArithmeticFunctionFactory.multiply()) - .put(DIVIDE, ArithmeticFunctionFactory.divide()) - .put(MODULES, ArithmeticFunctionFactory.modules()) - .put(ABS, ArithmeticFunctionFactory.abs()) - .put(ACOS, ArithmeticFunctionFactory.acos()) - .put(ASIN, ArithmeticFunctionFactory.asin()) - .put(ATAN, ArithmeticFunctionFactory.atan()) - .put(ATAN2, ArithmeticFunctionFactory.atan2()) - .put(TAN, ArithmeticFunctionFactory.tan()) - .put(CBRT, ArithmeticFunctionFactory.cbrt()) - .put(CEIL, ArithmeticFunctionFactory.ceil()) - .put(COS, ArithmeticFunctionFactory.cos()) - .put(COSH, ArithmeticFunctionFactory.cosh()) - .put(EXP, ArithmeticFunctionFactory.exp()) - .put(FLOOR, ArithmeticFunctionFactory.floor()) - .put(LN, ArithmeticFunctionFactory.ln()) - .put(LOG, ArithmeticFunctionFactory.log()) - .put(LOG2, ArithmeticFunctionFactory.log2()) - .put(LOG10, ArithmeticFunctionFactory.log10()) - .build(); + private static final Map operationExpressionBuilderMap = + new ImmutableMap.Builder() + .put(ADD, ArithmeticFunctionFactory.add()) + .put(SUBTRACT, ArithmeticFunctionFactory.subtract()) + .put(MULTIPLY, ArithmeticFunctionFactory.multiply()) + .put(DIVIDE, ArithmeticFunctionFactory.divide()) + .put(MODULES, ArithmeticFunctionFactory.modules()) + .put(ABS, ArithmeticFunctionFactory.abs()) + .put(ACOS, ArithmeticFunctionFactory.acos()) + .put(ASIN, ArithmeticFunctionFactory.asin()) + .put(ATAN, ArithmeticFunctionFactory.atan()) + .put(ATAN2, ArithmeticFunctionFactory.atan2()) + .put(TAN, ArithmeticFunctionFactory.tan()) + .put(CBRT, ArithmeticFunctionFactory.cbrt()) + .put(CEIL, ArithmeticFunctionFactory.ceil()) + .put(COS, ArithmeticFunctionFactory.cos()) + .put(COSH, ArithmeticFunctionFactory.cosh()) + .put(EXP, ArithmeticFunctionFactory.exp()) + .put(FLOOR, ArithmeticFunctionFactory.floor()) + .put(LN, ArithmeticFunctionFactory.ln()) + .put(LOG, ArithmeticFunctionFactory.log()) + .put(LOG2, ArithmeticFunctionFactory.log2()) + .put(LOG10, ArithmeticFunctionFactory.log10()) + .build(); - public static Expression of(ScalarOperation op, List expressions) { - return operationExpressionBuilderMap.get(op).build(expressions); - } + public static Expression of(ScalarOperation 
op, List expressions) { + return operationExpressionBuilderMap.get(op).build(expressions); + } - /** - * Ref Expression. Define the binding name which could be resolved in {@link BindingTuple} - */ - public static Expression ref(String bindingName) { - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return tuple.resolve(bindingName); - } + /** Ref Expression. Define the binding name which could be resolved in {@link BindingTuple} */ + public static Expression ref(String bindingName) { + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return tuple.resolve(bindingName); + } - @Override - public String toString() { - return String.format("%s", bindingName); - } - }; - } + @Override + public String toString() { + return String.format("%s", bindingName); + } + }; + } - /** - * Literal Expression. - */ - public static Expression literal(ExprValue value) { - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return value; - } + /** Literal Expression. */ + public static Expression literal(ExprValue value) { + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return value; + } - @Override - public String toString() { - return String.format("%s", value); - } - }; - } + @Override + public String toString() { + return String.format("%s", value); + } + }; + } - /** - * Cast Expression. - */ - public static Expression cast(Expression expr) { - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return expr.valueOf(tuple); - } + /** Cast Expression. */ + public static Expression cast(Expression expr) { + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return expr.valueOf(tuple); + } - @Override - public String toString() { - return String.format("cast(%s)", expr); - } - }; - } + @Override + public String toString() { + return String.format("cast(%s)", expr); + } + }; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java index 5f2cbb5776..76744d7d34 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import java.util.List; import org.opensearch.sql.legacy.expression.core.Expression; -/** - * The definition of the {@link Expression} builder. - */ +/** The definition of the {@link Expression} builder. 
*/ public interface ExpressionBuilder { - Expression build(List expressionList); + Expression build(List expressionList); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java index 50b1523497..ce7c1a8fca 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,23 +11,23 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprBooleanValue implements ExprValue { - private final Boolean value; + private final Boolean value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.BOOLEAN_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.BOOLEAN_VALUE; + } - @Override - public String toString() { - final StringBuffer sb = new StringBuffer("SSBooleanValue{"); - sb.append("value=").append(value); - sb.append('}'); - return sb.toString(); - } + @Override + public String toString() { + final StringBuffer sb = new StringBuffer("SSBooleanValue{"); + sb.append("value=").append(value); + sb.append('}'); + return sb.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java index 99eb35272d..f6200a2bea 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import static org.opensearch.sql.legacy.expression.model.ExprValue.ExprValueKind.COLLECTION_VALUE; @@ -16,22 +15,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprCollectionValue implements ExprValue { - private final List valueList; - - @Override - public Object value() { - return valueList; - } - - @Override - public ExprValueKind kind() { - return COLLECTION_VALUE; - } - - @Override - public String toString() { - return valueList.stream() - .map(Object::toString) - .collect(Collectors.joining(",", "[", "]")); - } + private final List valueList; + + @Override + public Object value() { + return valueList; + } + + @Override + public ExprValueKind kind() { + return COLLECTION_VALUE; + } + + @Override + public String toString() { + return valueList.stream().map(Object::toString).collect(Collectors.joining(",", "[", "]")); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java index fdfacc4c55..16c607cae5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprDoubleValue implements 
ExprValue { - private final Double value; + private final Double value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.DOUBLE_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.DOUBLE_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java index f4d4dfc1b3..478ca645a7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprFloatValue implements ExprValue { - private final Float value; + private final Float value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.DOUBLE_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.DOUBLE_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java index 3285934280..92c4d38a4c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprIntegerValue implements ExprValue { - private final Integer value; + private final Integer value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.INTEGER_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.INTEGER_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java index b50a0088db..4f96ecf89a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprLongValue implements ExprValue { - private final Long value; + private final Long value; - @Override - public Object 
value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.LONG_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.LONG_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java index e05e32b920..c2b70537c9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; -/** - * The definition of the missing value. - */ +/** The definition of the missing value. */ public class ExprMissingValue implements ExprValue { - @Override - public ExprValueKind kind() { - return ExprValueKind.MISSING_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.MISSING_VALUE; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java index dcdec6117f..3c93b82187 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprStringValue implements ExprValue { - private final String value; + private final String value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.STRING_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.STRING_VALUE; + } - @Override - public String toString() { - return value; - } + @Override + public String toString() { + return value; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java index 7debcef864..5f690a6b3e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import java.util.Map; @@ -14,23 +13,22 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprTupleValue implements ExprValue { - private final Map valueMap; + private final Map valueMap; - @Override - public Object value() { - return valueMap; - } + @Override + public Object value() { + return valueMap; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.TUPLE_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.TUPLE_VALUE; + } - @Override - public String toString() { - return valueMap.entrySet() - .stream() - .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) - 
.collect(Collectors.joining(",", "{", "}")); - } + @Override + public String toString() { + return valueMap.entrySet().stream() + .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) + .collect(Collectors.joining(",", "{", "}")); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java index d15cb39270..aae1973d4b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java @@ -3,31 +3,28 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; -/** - * The definition of the Expression Value. - */ +/** The definition of the Expression Value. */ public interface ExprValue { - default Object value() { - throw new IllegalStateException("invalid value operation on " + kind()); - } + default Object value() { + throw new IllegalStateException("invalid value operation on " + kind()); + } - default ExprValueKind kind() { - throw new IllegalStateException("invalid kind operation"); - } + default ExprValueKind kind() { + throw new IllegalStateException("invalid kind operation"); + } - enum ExprValueKind { - TUPLE_VALUE, - COLLECTION_VALUE, - MISSING_VALUE, + enum ExprValueKind { + TUPLE_VALUE, + COLLECTION_VALUE, + MISSING_VALUE, - BOOLEAN_VALUE, - INTEGER_VALUE, - DOUBLE_VALUE, - LONG_VALUE, - FLOAT_VALUE, - STRING_VALUE - } + BOOLEAN_VALUE, + INTEGER_VALUE, + DOUBLE_VALUE, + LONG_VALUE, + FLOAT_VALUE, + STRING_VALUE + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java index 5dc2b5b50a..28f4c70293 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import java.math.BigDecimal; @@ -12,62 +11,60 @@ import java.util.List; import java.util.Map; -/** - * The definition of {@link ExprValue} factory. - */ +/** The definition of {@link ExprValue} factory. 
*/ public class ExprValueFactory { - public static ExprValue booleanValue(Boolean value) { - return new ExprBooleanValue(value); - } + public static ExprValue booleanValue(Boolean value) { + return new ExprBooleanValue(value); + } - public static ExprValue integerValue(Integer value) { - return new ExprIntegerValue(value); - } + public static ExprValue integerValue(Integer value) { + return new ExprIntegerValue(value); + } - public static ExprValue doubleValue(Double value) { - return new ExprDoubleValue(value); - } + public static ExprValue doubleValue(Double value) { + return new ExprDoubleValue(value); + } - public static ExprValue stringValue(String value) { - return new ExprStringValue(value); - } + public static ExprValue stringValue(String value) { + return new ExprStringValue(value); + } - public static ExprValue longValue(Long value) { - return new ExprLongValue(value); - } + public static ExprValue longValue(Long value) { + return new ExprLongValue(value); + } - public static ExprValue tupleValue(Map map) { - Map valueMap = new HashMap<>(); - map.forEach((k, v) -> valueMap.put(k, from(v))); - return new ExprTupleValue(valueMap); - } + public static ExprValue tupleValue(Map map) { + Map valueMap = new HashMap<>(); + map.forEach((k, v) -> valueMap.put(k, from(v))); + return new ExprTupleValue(valueMap); + } - public static ExprValue collectionValue(List list) { - List valueList = new ArrayList<>(); - list.forEach(o -> valueList.add(from(o))); - return new ExprCollectionValue(valueList); - } + public static ExprValue collectionValue(List list) { + List valueList = new ArrayList<>(); + list.forEach(o -> valueList.add(from(o))); + return new ExprCollectionValue(valueList); + } - public static ExprValue from(Object o) { - if (o instanceof Map) { - return tupleValue((Map) o); - } else if (o instanceof List) { - return collectionValue(((List) o)); - } else if (o instanceof Integer) { - return integerValue((Integer) o); - } else if (o instanceof Long) { - return longValue(((Long) o)); - } else if (o instanceof Boolean) { - return booleanValue((Boolean) o); - } else if (o instanceof Double) { - return doubleValue((Double) o); - } else if (o instanceof BigDecimal) { - return doubleValue(((BigDecimal) o).doubleValue()); - } else if (o instanceof String) { - return stringValue((String) o); - } else { - throw new IllegalStateException("unsupported type " + o.getClass()); - } + public static ExprValue from(Object o) { + if (o instanceof Map) { + return tupleValue((Map) o); + } else if (o instanceof List) { + return collectionValue(((List) o)); + } else if (o instanceof Integer) { + return integerValue((Integer) o); + } else if (o instanceof Long) { + return longValue(((Long) o)); + } else if (o instanceof Boolean) { + return booleanValue((Boolean) o); + } else if (o instanceof Double) { + return doubleValue((Double) o); + } else if (o instanceof BigDecimal) { + return doubleValue(((BigDecimal) o).doubleValue()); + } else if (o instanceof String) { + return stringValue((String) o); + } else { + throw new IllegalStateException("unsupported type " + o.getClass()); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java index 4688e74b6a..9873c72886 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import static org.opensearch.sql.legacy.expression.model.ExprValue.ExprValueKind.BOOLEAN_VALUE; @@ -15,64 +14,62 @@ import java.util.List; import java.util.Map; -/** - * The definition of ExprValue Utils. - */ +/** The definition of ExprValue Utils. */ public class ExprValueUtils { - public static Integer getIntegerValue(ExprValue exprValue) { - return getNumberValue(exprValue).intValue(); - } + public static Integer getIntegerValue(ExprValue exprValue) { + return getNumberValue(exprValue).intValue(); + } - public static Double getDoubleValue(ExprValue exprValue) { - return getNumberValue(exprValue).doubleValue(); - } + public static Double getDoubleValue(ExprValue exprValue) { + return getNumberValue(exprValue).doubleValue(); + } - public static Long getLongValue(ExprValue exprValue) { - return getNumberValue(exprValue).longValue(); - } + public static Long getLongValue(ExprValue exprValue) { + return getNumberValue(exprValue).longValue(); + } - public static Float getFloatValue(ExprValue exprValue) { - return getNumberValue(exprValue).floatValue(); - } + public static Float getFloatValue(ExprValue exprValue) { + return getNumberValue(exprValue).floatValue(); + } - public static String getStringValue(ExprValue exprValue) { - return convert(exprValue, STRING_VALUE); - } + public static String getStringValue(ExprValue exprValue) { + return convert(exprValue, STRING_VALUE); + } - public static List getCollectionValue(ExprValue exprValue) { - return convert(exprValue, COLLECTION_VALUE); - } + public static List getCollectionValue(ExprValue exprValue) { + return convert(exprValue, COLLECTION_VALUE); + } - public static Map getTupleValue(ExprValue exprValue) { - return convert(exprValue, TUPLE_VALUE); - } + public static Map getTupleValue(ExprValue exprValue) { + return convert(exprValue, TUPLE_VALUE); + } - public static Boolean getBooleanValue(ExprValue exprValue) { - return convert(exprValue, BOOLEAN_VALUE); - } + public static Boolean getBooleanValue(ExprValue exprValue) { + return convert(exprValue, BOOLEAN_VALUE); + } - @VisibleForTesting - public static Number getNumberValue(ExprValue exprValue) { - switch (exprValue.kind()) { - case INTEGER_VALUE: - case DOUBLE_VALUE: - case LONG_VALUE: - case FLOAT_VALUE: - return (Number) exprValue.value(); - default: - break; - } - throw new IllegalStateException( - String.format("invalid to get NUMBER_VALUE from expr type of %s", exprValue.kind())); + @VisibleForTesting + public static Number getNumberValue(ExprValue exprValue) { + switch (exprValue.kind()) { + case INTEGER_VALUE: + case DOUBLE_VALUE: + case LONG_VALUE: + case FLOAT_VALUE: + return (Number) exprValue.value(); + default: + break; } + throw new IllegalStateException( + String.format("invalid to get NUMBER_VALUE from expr type of %s", exprValue.kind())); + } - @SuppressWarnings("unchecked") - private static T convert(ExprValue exprValue, ExprValue.ExprValueKind toType) { - if (exprValue.kind() == toType) { - return (T) exprValue.value(); - } else { - throw new IllegalStateException( - String.format("invalid to get %s from expr type of %s", toType, exprValue.kind())); - } + @SuppressWarnings("unchecked") + private static T convert(ExprValue exprValue, ExprValue.ExprValueKind toType) { + if (exprValue.kind() == toType) { + return (T) exprValue.value(); + } else { + throw new IllegalStateException( + String.format("invalid to get %s from expr type of %s", toType, exprValue.kind())); } + } } diff 
--git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java index 5752927952..2f7c269351 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java @@ -3,29 +3,25 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.util.function.Supplier; -/** - * Gauge metric, an instant value like cpu usage, state and so on - */ +/** Gauge metric, an instant value like cpu usage, state and so on */ public class GaugeMetric extends Metric { - private Supplier loadValue; - - public GaugeMetric(String name, Supplier supplier) { - super(name); - this.loadValue = supplier; - } + private Supplier loadValue; - public String getName() { - return super.getName(); - } + public GaugeMetric(String name, Supplier supplier) { + super(name); + this.loadValue = supplier; + } - public T getValue() { - return loadValue.get(); - } + public String getName() { + return super.getName(); + } + public T getValue() { + return loadValue.get(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java index 9e31b0d9cd..956e0f558c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java @@ -3,23 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; public abstract class Metric implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private String name; + private static final long serialVersionUID = 1L; - public Metric(String name) { - this.name = name; - } + private String name; - public String getName() { - return name; - } + public Metric(String name) { + this.name = name; + } - public abstract T getValue(); + public String getName() { + return name; + } + public abstract T getValue(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java index 9319b77644..e4fbd173c9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java @@ -3,34 +3,33 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import org.opensearch.sql.legacy.query.join.BackOffRetryStrategy; public class MetricFactory { - public static Metric createMetric(MetricName name) { + public static Metric createMetric(MetricName name) { - switch (name) { - case REQ_TOTAL: - case DEFAULT_CURSOR_REQUEST_TOTAL: - case DEFAULT: - case PPL_REQ_TOTAL: - return new NumericMetric<>(name.getName(), new BasicCounter()); - case CIRCUIT_BREAKER: - return new GaugeMetric<>(name.getName(), BackOffRetryStrategy.GET_CB_STATE); - case REQ_COUNT_TOTAL: - case DEFAULT_CURSOR_REQUEST_COUNT_TOTAL: - case FAILED_REQ_COUNT_CUS: - case FAILED_REQ_COUNT_SYS: - case FAILED_REQ_COUNT_CB: - case PPL_REQ_COUNT_TOTAL: - case PPL_FAILED_REQ_COUNT_CUS: - case PPL_FAILED_REQ_COUNT_SYS: - return new NumericMetric<>(name.getName(), new RollingCounter()); - default: - return new NumericMetric<>(name.getName(), new BasicCounter()); - } + switch (name) { + case REQ_TOTAL: + case DEFAULT_CURSOR_REQUEST_TOTAL: + case DEFAULT: + case PPL_REQ_TOTAL: + return 
new NumericMetric<>(name.getName(), new BasicCounter()); + case CIRCUIT_BREAKER: + return new GaugeMetric<>(name.getName(), BackOffRetryStrategy.GET_CB_STATE); + case REQ_COUNT_TOTAL: + case DEFAULT_CURSOR_REQUEST_COUNT_TOTAL: + case FAILED_REQ_COUNT_CUS: + case FAILED_REQ_COUNT_SYS: + case FAILED_REQ_COUNT_CB: + case PPL_REQ_COUNT_TOTAL: + case PPL_FAILED_REQ_COUNT_CUS: + case PPL_FAILED_REQ_COUNT_SYS: + return new NumericMetric<>(name.getName(), new RollingCounter()); + default: + return new NumericMetric<>(name.getName(), new BasicCounter()); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java index 16a719b97e..1c895f5d69 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import com.google.common.collect.ImmutableSet; @@ -13,51 +12,55 @@ import java.util.stream.Collectors; public enum MetricName { + REQ_TOTAL("request_total"), + REQ_COUNT_TOTAL("request_count"), + FAILED_REQ_COUNT_SYS("failed_request_count_syserr"), + FAILED_REQ_COUNT_CUS("failed_request_count_cuserr"), + FAILED_REQ_COUNT_CB("failed_request_count_cb"), + DEFAULT_CURSOR_REQUEST_TOTAL("default_cursor_request_total"), + DEFAULT_CURSOR_REQUEST_COUNT_TOTAL("default_cursor_request_count"), + CIRCUIT_BREAKER("circuit_breaker"), + DEFAULT("default"), + + PPL_REQ_TOTAL("ppl_request_total"), + PPL_REQ_COUNT_TOTAL("ppl_request_count"), + PPL_FAILED_REQ_COUNT_SYS("ppl_failed_request_count_syserr"), + PPL_FAILED_REQ_COUNT_CUS("ppl_failed_request_count_cuserr"), + DATASOURCE_REQ_COUNT("datasource_request_count"), + DATASOURCE_FAILED_REQ_COUNT_SYS("datasource_failed_request_count_syserr"), + DATASOURCE_FAILED_REQ_COUNT_CUS("datasource_failed_request_count_cuserr"); + + private String name; + + MetricName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public static List getNames() { + return Arrays.stream(MetricName.values()).map(v -> v.name).collect(Collectors.toList()); + } + + private static Set NUMERICAL_METRIC = + new ImmutableSet.Builder() + .add(PPL_REQ_TOTAL) + .add(PPL_REQ_COUNT_TOTAL) + .add(PPL_FAILED_REQ_COUNT_SYS) + .add(PPL_FAILED_REQ_COUNT_CUS) + .build(); - REQ_TOTAL("request_total"), - REQ_COUNT_TOTAL("request_count"), - FAILED_REQ_COUNT_SYS("failed_request_count_syserr"), - FAILED_REQ_COUNT_CUS("failed_request_count_cuserr"), - FAILED_REQ_COUNT_CB("failed_request_count_cb"), - DEFAULT_CURSOR_REQUEST_TOTAL("default_cursor_request_total"), - DEFAULT_CURSOR_REQUEST_COUNT_TOTAL("default_cursor_request_count"), - CIRCUIT_BREAKER("circuit_breaker"), - DEFAULT("default"), - - PPL_REQ_TOTAL("ppl_request_total"), - PPL_REQ_COUNT_TOTAL("ppl_request_count"), - PPL_FAILED_REQ_COUNT_SYS("ppl_failed_request_count_syserr"), - PPL_FAILED_REQ_COUNT_CUS("ppl_failed_request_count_cuserr"), - DATASOURCE_REQ_COUNT("datasource_request_count"), - DATASOURCE_FAILED_REQ_COUNT_SYS("datasource_failed_request_count_syserr"), - DATASOURCE_FAILED_REQ_COUNT_CUS("datasource_failed_request_count_cuserr"); - - private String name; - - MetricName(String name) { - this.name = name; - } - - public String getName() { - return name; - } - - public static List getNames() { - return Arrays.stream(MetricName.values()).map(v -> v.name).collect(Collectors.toList()); - } - - - private static Set 
NUMERICAL_METRIC = new ImmutableSet.Builder() - .add(PPL_REQ_TOTAL) - .add(PPL_REQ_COUNT_TOTAL) - .add(PPL_FAILED_REQ_COUNT_SYS) - .add(PPL_FAILED_REQ_COUNT_CUS) - .build(); - - public boolean isNumerical() { - return this == REQ_TOTAL || this == REQ_COUNT_TOTAL || this == FAILED_REQ_COUNT_SYS - || this == FAILED_REQ_COUNT_CUS || this == FAILED_REQ_COUNT_CB || this == DEFAULT - || this == DEFAULT_CURSOR_REQUEST_TOTAL || this == DEFAULT_CURSOR_REQUEST_COUNT_TOTAL - || NUMERICAL_METRIC.contains(this); - } + public boolean isNumerical() { + return this == REQ_TOTAL + || this == REQ_COUNT_TOTAL + || this == FAILED_REQ_COUNT_SYS + || this == FAILED_REQ_COUNT_CUS + || this == FAILED_REQ_COUNT_CB + || this == DEFAULT + || this == DEFAULT_CURSOR_REQUEST_TOTAL + || this == DEFAULT_CURSOR_REQUEST_COUNT_TOTAL + || NUMERICAL_METRIC.contains(this); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java index e53dfa6804..858f9e5cef 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.util.ArrayList; @@ -13,69 +12,68 @@ public class Metrics { - private static Metrics metrics = new Metrics(); - private ConcurrentHashMap registeredMetricsByName = new ConcurrentHashMap<>(); - - public static Metrics getInstance() { - return metrics; - } + private static Metrics metrics = new Metrics(); + private ConcurrentHashMap registeredMetricsByName = new ConcurrentHashMap<>(); - private Metrics() { - } + public static Metrics getInstance() { + return metrics; + } - public void registerDefaultMetrics() { - for (MetricName metricName : MetricName.values()) { - registerMetric(MetricFactory.createMetric(metricName)); - } - } + private Metrics() {} - public void registerMetric(Metric metric) { - registeredMetricsByName.put(metric.getName(), metric); + public void registerDefaultMetrics() { + for (MetricName metricName : MetricName.values()) { + registerMetric(MetricFactory.createMetric(metricName)); } + } - public void unregisterMetric(String name) { - if (name == null) { - return; - } + public void registerMetric(Metric metric) { + registeredMetricsByName.put(metric.getName(), metric); + } - registeredMetricsByName.remove(name); + public void unregisterMetric(String name) { + if (name == null) { + return; } - public Metric getMetric(String name) { - if (name == null) { - return null; - } + registeredMetricsByName.remove(name); + } - return registeredMetricsByName.get(name); + public Metric getMetric(String name) { + if (name == null) { + return null; } - public NumericMetric getNumericalMetric(MetricName metricName) { - String name = metricName.getName(); - if (!metricName.isNumerical()) { - name = MetricName.DEFAULT.getName(); - } + return registeredMetricsByName.get(name); + } - return (NumericMetric) registeredMetricsByName.get(name); + public NumericMetric getNumericalMetric(MetricName metricName) { + String name = metricName.getName(); + if (!metricName.isNumerical()) { + name = MetricName.DEFAULT.getName(); } - public List getAllMetrics() { - return new ArrayList<>(registeredMetricsByName.values()); - } + return (NumericMetric) registeredMetricsByName.get(name); + } - public String collectToJSON() { - JSONObject metricsJSONObject = new JSONObject(); + public List getAllMetrics() { + return new 
ArrayList<>(registeredMetricsByName.values()); + } - for (Metric metric : registeredMetricsByName.values()) { - if (metric.getName().equals("default")) { - continue; - } - metricsJSONObject.put(metric.getName(), metric.getValue()); - } + public String collectToJSON() { + JSONObject metricsJSONObject = new JSONObject(); - return metricsJSONObject.toString(); + for (Metric metric : registeredMetricsByName.values()) { + if (metric.getName().equals("default")) { + continue; + } + metricsJSONObject.put(metric.getName(), metric.getValue()); } - public void clear() { - registeredMetricsByName.clear(); - } + return metricsJSONObject.toString(); + } + + public void clear() { + registeredMetricsByName.clear(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java index 5f6d03f0ac..be9c2f9652 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import static org.opensearch.sql.legacy.utils.StringUtils.isQuoted; @@ -53,987 +52,1002 @@ import com.alibaba.druid.util.JdbcConstants; import java.util.List; -/** - * Created by Eliran on 18/8/2015. - */ +/** Created by Eliran on 18/8/2015. */ public class ElasticSqlExprParser extends SQLExprParser { - public ElasticSqlExprParser(Lexer lexer) { - super(lexer); - this.aggregateFunctions = AGGREGATE_FUNCTIONS; + public ElasticSqlExprParser(Lexer lexer) { + super(lexer); + this.aggregateFunctions = AGGREGATE_FUNCTIONS; + } + + public ElasticSqlExprParser(String sql) { + this(new ElasticLexer(sql)); + this.lexer.nextToken(); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + @Override + public void parseHints(List hints) { + while (lexer.token() == Token.HINT) { + hints.add(new SQLCommentHint(lexer.stringVal())); + lexer.nextToken(); } + } - public ElasticSqlExprParser(String sql) { - this(new ElasticLexer(sql)); - this.lexer.nextToken(); + @Override + protected SQLExpr methodRest(SQLExpr expr, boolean acceptLPAREN) { + if (acceptLPAREN) { + accept(Token.LPAREN); } - @SuppressWarnings({"unchecked", "rawtypes"}) - @Override - public void parseHints(List hints) { - while (lexer.token() == Token.HINT) { - hints.add(new SQLCommentHint(lexer.stringVal())); - lexer.nextToken(); - } + if (expr instanceof SQLName || expr instanceof SQLDefaultExpr) { + String methodName; + + SQLMethodInvokeExpr methodInvokeExpr; + if (expr instanceof SQLPropertyExpr) { + methodName = ((SQLPropertyExpr) expr).getName(); + methodInvokeExpr = new SQLMethodInvokeExpr(methodName); + methodInvokeExpr.setOwner(((SQLPropertyExpr) expr).getOwner()); + } else { + methodName = expr.toString(); + methodInvokeExpr = new SQLMethodInvokeExpr(methodName); + } + + if (isAggreateFunction(methodName)) { + SQLAggregateExpr aggregateExpr = parseAggregateExpr(methodName); + + return aggregateExpr; + } + + if (lexer.token() != Token.RPAREN) { + exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); + } + + accept(Token.RPAREN); + + return primaryRest(methodInvokeExpr); } - @Override - protected SQLExpr methodRest(SQLExpr expr, boolean acceptLPAREN) { - if (acceptLPAREN) { - accept(Token.LPAREN); + throw new ParserException("Syntax error: " + lexer.token()); + } + + public SQLExpr primary() { + + if (lexer.token() == Token.LBRACE) { + lexer.nextToken(); 
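From a caller's point of view, the ElasticSqlExprParser overrides in this file surface as ordinary Druid AST nodes. A hedged usage sketch (it assumes the legacy module and its Druid dependency are on the classpath; the input string is only an example):

import com.alibaba.druid.sql.ast.SQLExpr;
import org.opensearch.sql.legacy.parser.ElasticSqlExprParser;

class ParserSketch {
  // Parses a single SQL expression with the OpenSearch-flavoured parser.
  static SQLExpr parse(String expression) {
    return new ElasticSqlExprParser(expression).expr();
  }

  public static void main(String[] args) {
    SQLExpr expr = parse("[first name] = 'Eliran'");
    // The bracketed token is accepted as one identifier; the result is
    // expected to be a binary comparison expression.
    System.out.println(expr.getClass().getSimpleName());
  }
}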
+ boolean foundRBrace = false; + if (lexer.stringVal().equals("ts")) { + String current = lexer.stringVal(); + do { + if (current.equals(lexer.token().RBRACE.name())) { + foundRBrace = true; + break; + } + lexer.nextToken(); + current = lexer.token().name(); + } while (!foundRBrace && !current.trim().equals("")); + + if (foundRBrace) { + SQLOdbcExpr sdle = new SQLOdbcExpr(lexer.stringVal()); + + accept(Token.RBRACE); + return sdle; + } else { + throw new ParserException("Error. Unable to find closing RBRACE"); } + } else { + throw new ParserException("Error. Unable to parse ODBC Literal Timestamp"); + } + } else if (lexer.token() == Token.LBRACKET) { + StringBuilder identifier = new StringBuilder(); + lexer.nextToken(); + String prefix = ""; + while (lexer.token() != Token.RBRACKET) { + if (lexer.token() != Token.IDENTIFIER + && lexer.token() != Token.INDEX + && lexer.token() != Token.LITERAL_CHARS) { + throw new ParserException( + "All items between Brackets should be identifiers , got:" + lexer.token()); + } + identifier.append(prefix); + identifier.append(lexer.stringVal()); + prefix = " "; + lexer.nextToken(); + } + + accept(Token.RBRACKET); + return new SQLIdentifierExpr(identifier.toString()); + } else if (lexer.token() == Token.NOT) { + lexer.nextToken(); + SQLExpr sqlExpr; + if (lexer.token() == Token.EXISTS) { + lexer.nextToken(); + accept(Token.LPAREN); + sqlExpr = new SQLExistsExpr(createSelectParser().select(), true); + accept(Token.RPAREN); + } else if (lexer.token() == Token.LPAREN) { + lexer.nextToken(); - if (expr instanceof SQLName || expr instanceof SQLDefaultExpr) { - String methodName; - - SQLMethodInvokeExpr methodInvokeExpr; - if (expr instanceof SQLPropertyExpr) { - methodName = ((SQLPropertyExpr) expr).getName(); - methodInvokeExpr = new SQLMethodInvokeExpr(methodName); - methodInvokeExpr.setOwner(((SQLPropertyExpr) expr).getOwner()); - } else { - methodName = expr.toString(); - methodInvokeExpr = new SQLMethodInvokeExpr(methodName); - } + SQLExpr notTarget = expr(); - if (isAggreateFunction(methodName)) { - SQLAggregateExpr aggregateExpr = parseAggregateExpr(methodName); + accept(Token.RPAREN); - return aggregateExpr; - } + sqlExpr = new SQLNotExpr(notTarget); - if (lexer.token() != Token.RPAREN) { - exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); - } + return primaryRest(sqlExpr); + } else { + SQLExpr restExpr = relational(); + sqlExpr = new SQLNotExpr(restExpr); + } + return sqlExpr; + } - accept(Token.RPAREN); + boolean parenWrapped = lexer.token() == Token.LPAREN; - return primaryRest(methodInvokeExpr); - } + SQLExpr expr = primary2(); - throw new ParserException("Syntax error: " + lexer.token()); + // keep track of if the identifier is wrapped in parens + if (parenWrapped && expr instanceof SQLIdentifierExpr) { + expr = new SQLParensIdentifierExpr((SQLIdentifierExpr) expr); } + return expr; + } - public SQLExpr primary() { + public static String[] AGGREGATE_FUNCTIONS = { + "AVG", "COUNT", "GROUP_CONCAT", "MAX", "MIN", "STDDEV", "SUM" + }; - if (lexer.token() == Token.LBRACE) { - lexer.nextToken(); - boolean foundRBrace = false; - if (lexer.stringVal().equals("ts")) { - String current = lexer.stringVal(); - do { - if (current.equals(lexer.token().RBRACE.name())) { - foundRBrace = true; - break; - } - lexer.nextToken(); - current = lexer.token().name(); - } while (!foundRBrace && !current.trim().equals("")); - - if (foundRBrace) { - SQLOdbcExpr sdle = new SQLOdbcExpr(lexer.stringVal()); - - accept(Token.RBRACE); - return sdle; - } else { - throw 
new ParserException("Error. Unable to find closing RBRACE"); - } - } else { - throw new ParserException("Error. Unable to parse ODBC Literal Timestamp"); - } - } else if (lexer.token() == Token.LBRACKET) { - StringBuilder identifier = new StringBuilder(); - lexer.nextToken(); - String prefix = ""; - while (lexer.token() != Token.RBRACKET) { - if (lexer.token() != Token.IDENTIFIER && lexer.token() != Token.INDEX - && lexer.token() != Token.LITERAL_CHARS) { - throw new ParserException("All items between Brackets should be identifiers , got:" - + lexer.token()); - } - identifier.append(prefix); - identifier.append(lexer.stringVal()); - prefix = " "; - lexer.nextToken(); - } + public SQLExpr relationalRest(SQLExpr expr) { + if (identifierEquals("REGEXP")) { + lexer.nextToken(); + SQLExpr rightExp = equality(); - accept(Token.RBRACKET); - return new SQLIdentifierExpr(identifier.toString()); - } else if (lexer.token() == Token.NOT) { - lexer.nextToken(); - SQLExpr sqlExpr; - if (lexer.token() == Token.EXISTS) { - lexer.nextToken(); - accept(Token.LPAREN); - sqlExpr = new SQLExistsExpr(createSelectParser().select(), true); - accept(Token.RPAREN); - } else if (lexer.token() == Token.LPAREN) { - lexer.nextToken(); + rightExp = relationalRest(rightExp); - SQLExpr notTarget = expr(); + return new SQLBinaryOpExpr(expr, SQLBinaryOperator.RegExp, rightExp, JdbcConstants.MYSQL); + } - accept(Token.RPAREN); + return super.relationalRest(expr); + } - sqlExpr = new SQLNotExpr(notTarget); + public SQLExpr multiplicativeRest(SQLExpr expr) { + if (lexer.token() == Token.IDENTIFIER && "MOD".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); + SQLExpr rightExp = primary(); - return primaryRest(sqlExpr); - } else { - SQLExpr restExpr = relational(); - sqlExpr = new SQLNotExpr(restExpr); - } - return sqlExpr; - } + rightExp = relationalRest(rightExp); + + return new SQLBinaryOpExpr(expr, SQLBinaryOperator.Modulus, rightExp, JdbcConstants.MYSQL); + } - boolean parenWrapped = lexer.token() == Token.LPAREN; + return super.multiplicativeRest(expr); + } - SQLExpr expr = primary2(); + public SQLExpr notRationalRest(SQLExpr expr) { + if (identifierEquals("REGEXP")) { + lexer.nextToken(); + SQLExpr rightExp = primary(); - // keep track of if the identifier is wrapped in parens - if (parenWrapped && expr instanceof SQLIdentifierExpr) { - expr = new SQLParensIdentifierExpr((SQLIdentifierExpr) expr); - } + rightExp = relationalRest(rightExp); - return expr; + return new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotRegExp, rightExp, JdbcConstants.MYSQL); } - public static String[] AGGREGATE_FUNCTIONS = {"AVG", "COUNT", "GROUP_CONCAT", "MAX", "MIN", "STDDEV", "SUM"}; + return super.notRationalRest(expr); + } + public SQLExpr primary2() { + final Token tok = lexer.token(); - public SQLExpr relationalRest(SQLExpr expr) { - if (identifierEquals("REGEXP")) { - lexer.nextToken(); - SQLExpr rightExp = equality(); + if (identifierEquals("outfile")) { + lexer.nextToken(); + SQLExpr file = primary(); + SQLExpr expr = new MySqlOutFileExpr(file); - rightExp = relationalRest(rightExp); + return primaryRest(expr); + } - return new SQLBinaryOpExpr(expr, SQLBinaryOperator.RegExp, rightExp, JdbcConstants.MYSQL); + switch (tok) { + case LITERAL_ALIAS: + String aliasValue = lexer.stringVal(); + lexer.nextToken(); + return primaryRest(new SQLCharExpr(aliasValue)); + case VARIANT: + SQLVariantRefExpr varRefExpr = new SQLVariantRefExpr(lexer.stringVal()); + lexer.nextToken(); + if (varRefExpr.getName().equalsIgnoreCase("@@global")) { + 
accept(Token.DOT); + varRefExpr = new SQLVariantRefExpr(lexer.stringVal(), true); + lexer.nextToken(); + } else if (varRefExpr.getName().equals("@") && lexer.token() == Token.LITERAL_CHARS) { + varRefExpr.setName("@'" + lexer.stringVal() + "'"); + lexer.nextToken(); + } else if (varRefExpr.getName().equals("@@") && lexer.token() == Token.LITERAL_CHARS) { + varRefExpr.setName("@@'" + lexer.stringVal() + "'"); + lexer.nextToken(); + } + return primaryRest(varRefExpr); + case VALUES: + lexer.nextToken(); + if (lexer.token() != Token.LPAREN) { + throw new ParserException("Syntax error: " + lexer.token()); + } + return this.methodRest(new SQLIdentifierExpr("VALUES"), true); + case BINARY: + lexer.nextToken(); + if (lexer.token() == Token.COMMA + || lexer.token() == Token.SEMI + || lexer.token() == Token.EOF) { + return new SQLIdentifierExpr("BINARY"); + } else { + SQLUnaryExpr binaryExpr = new SQLUnaryExpr(SQLUnaryOperator.BINARY, expr()); + return primaryRest(binaryExpr); } + case CACHE: + case GROUP: + lexer.nextToken(); + return primaryRest(new SQLIdentifierExpr(lexer.stringVal())); + case DOT: + lexer.nextToken(); + return primaryRest(new SQLIdentifierExpr("." + lexer.stringVal())); + default: + return super.primary(); + } + } - return super.relationalRest(expr); + public final SQLExpr primaryRest(SQLExpr expr) { + if (expr == null) { + throw new IllegalArgumentException("Illegal expression: NULL"); } - public SQLExpr multiplicativeRest(SQLExpr expr) { - if (lexer.token() == Token.IDENTIFIER && "MOD".equalsIgnoreCase(lexer.stringVal())) { + if (lexer.token() == Token.LITERAL_CHARS) { + if (expr instanceof SQLIdentifierExpr) { + SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; + String ident = identExpr.getName(); + + if (ident.equalsIgnoreCase("x")) { + String charValue = lexer.stringVal(); + lexer.nextToken(); + expr = new SQLHexExpr(charValue); + + return primaryRest(expr); + } else if (ident.equalsIgnoreCase("b")) { + String charValue = lexer.stringVal(); + lexer.nextToken(); + expr = new SQLBinaryExpr(charValue); + + return primaryRest(expr); + } else if (ident.startsWith("_")) { + String charValue = lexer.stringVal(); + lexer.nextToken(); + + MySqlCharExpr mysqlCharExpr = new MySqlCharExpr(charValue); + mysqlCharExpr.setCharset(identExpr.getName()); + if (identifierEquals("COLLATE")) { lexer.nextToken(); - SQLExpr rightExp = primary(); - rightExp = relationalRest(rightExp); + String collate = lexer.stringVal(); + mysqlCharExpr.setCollate(collate); + accept(Token.IDENTIFIER); + } + + expr = mysqlCharExpr; - return new SQLBinaryOpExpr(expr, SQLBinaryOperator.Modulus, rightExp, JdbcConstants.MYSQL); + return primaryRest(expr); } + } else if (expr instanceof SQLCharExpr) { + SQLMethodInvokeExpr concat = new SQLMethodInvokeExpr("CONCAT"); + concat.addParameter(expr); + do { + String chars = lexer.stringVal(); + concat.addParameter(new SQLCharExpr(chars)); + lexer.nextToken(); + } while (lexer.token() == Token.LITERAL_CHARS || lexer.token() == Token.LITERAL_ALIAS); + expr = concat; + } + } else if (lexer.token() == Token.IDENTIFIER) { + if (expr instanceof SQLHexExpr) { + if ("USING".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } + String charSet = lexer.stringVal(); + lexer.nextToken(); + expr.getAttributes().put("USING", charSet); + + return primaryRest(expr); + } + } else if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); - return 
super.multiplicativeRest(expr); - } + if (lexer.token() == Token.EQ) { + lexer.nextToken(); + } - public SQLExpr notRationalRest(SQLExpr expr) { - if (identifierEquals("REGEXP")) { - lexer.nextToken(); - SQLExpr rightExp = primary(); + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } - rightExp = relationalRest(rightExp); + String collate = lexer.stringVal(); + lexer.nextToken(); - return new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotRegExp, rightExp, JdbcConstants.MYSQL); - } + expr = + new SQLBinaryOpExpr( + expr, + SQLBinaryOperator.COLLATE, + new SQLIdentifierExpr(collate), + JdbcConstants.MYSQL); - return super.notRationalRest(expr); - } + return primaryRest(expr); + } else if (expr instanceof SQLVariantRefExpr) { + if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); - public SQLExpr primary2() { - final Token tok = lexer.token(); + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } - if (identifierEquals("outfile")) { - lexer.nextToken(); - SQLExpr file = primary(); - SQLExpr expr = new MySqlOutFileExpr(file); + String collate = lexer.stringVal(); + lexer.nextToken(); - return primaryRest(expr); + expr.putAttribute("COLLATE", collate); + return primaryRest(expr); } - - switch (tok) { - case LITERAL_ALIAS: - String aliasValue = lexer.stringVal(); - lexer.nextToken(); - return primaryRest(new SQLCharExpr(aliasValue)); - case VARIANT: - SQLVariantRefExpr varRefExpr = new SQLVariantRefExpr(lexer.stringVal()); - lexer.nextToken(); - if (varRefExpr.getName().equalsIgnoreCase("@@global")) { - accept(Token.DOT); - varRefExpr = new SQLVariantRefExpr(lexer.stringVal(), true); - lexer.nextToken(); - } else if (varRefExpr.getName().equals("@") && lexer.token() == Token.LITERAL_CHARS) { - varRefExpr.setName("@'" + lexer.stringVal() + "'"); - lexer.nextToken(); - } else if (varRefExpr.getName().equals("@@") && lexer.token() == Token.LITERAL_CHARS) { - varRefExpr.setName("@@'" + lexer.stringVal() + "'"); - lexer.nextToken(); - } - return primaryRest(varRefExpr); - case VALUES: - lexer.nextToken(); - if (lexer.token() != Token.LPAREN) { - throw new ParserException("Syntax error: " + lexer.token()); - } - return this.methodRest(new SQLIdentifierExpr("VALUES"), true); - case BINARY: - lexer.nextToken(); - if (lexer.token() == Token.COMMA || lexer.token() == Token.SEMI || lexer.token() == Token.EOF) { - return new SQLIdentifierExpr("BINARY"); - } else { - SQLUnaryExpr binaryExpr = new SQLUnaryExpr(SQLUnaryOperator.BINARY, expr()); - return primaryRest(binaryExpr); - } - case CACHE: - case GROUP: - lexer.nextToken(); - return primaryRest(new SQLIdentifierExpr(lexer.stringVal())); - case DOT: - lexer.nextToken(); - return primaryRest(new SQLIdentifierExpr("." 
+ lexer.stringVal())); - default: - return super.primary(); + } else if (expr instanceof SQLIntegerExpr) { + SQLIntegerExpr intExpr = (SQLIntegerExpr) expr; + String binaryString = lexer.stringVal(); + if (intExpr.getNumber().intValue() == 0 && binaryString.startsWith("b")) { + lexer.nextToken(); + expr = new SQLBinaryExpr(binaryString.substring(1)); + + return primaryRest(expr); } - + } } + if (lexer.token() == Token.LPAREN && expr instanceof SQLIdentifierExpr) { + SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; + String ident = identExpr.getName(); - public final SQLExpr primaryRest(SQLExpr expr) { - if (expr == null) { - throw new IllegalArgumentException("Illegal expression: NULL"); - } + if ("EXTRACT".equalsIgnoreCase(ident)) { + lexer.nextToken(); - if (lexer.token() == Token.LITERAL_CHARS) { - if (expr instanceof SQLIdentifierExpr) { - SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; - String ident = identExpr.getName(); - - if (ident.equalsIgnoreCase("x")) { - String charValue = lexer.stringVal(); - lexer.nextToken(); - expr = new SQLHexExpr(charValue); - - return primaryRest(expr); - } else if (ident.equalsIgnoreCase("b")) { - String charValue = lexer.stringVal(); - lexer.nextToken(); - expr = new SQLBinaryExpr(charValue); - - return primaryRest(expr); - } else if (ident.startsWith("_")) { - String charValue = lexer.stringVal(); - lexer.nextToken(); - - MySqlCharExpr mysqlCharExpr = new MySqlCharExpr(charValue); - mysqlCharExpr.setCharset(identExpr.getName()); - if (identifierEquals("COLLATE")) { - lexer.nextToken(); - - String collate = lexer.stringVal(); - mysqlCharExpr.setCollate(collate); - accept(Token.IDENTIFIER); - } - - expr = mysqlCharExpr; - - return primaryRest(expr); - } - } else if (expr instanceof SQLCharExpr) { - SQLMethodInvokeExpr concat = new SQLMethodInvokeExpr("CONCAT"); - concat.addParameter(expr); - do { - String chars = lexer.stringVal(); - concat.addParameter(new SQLCharExpr(chars)); - lexer.nextToken(); - } while (lexer.token() == Token.LITERAL_CHARS || lexer.token() == Token.LITERAL_ALIAS); - expr = concat; - } - } else if (lexer.token() == Token.IDENTIFIER) { - if (expr instanceof SQLHexExpr) { - if ("USING".equalsIgnoreCase(lexer.stringVal())) { - lexer.nextToken(); - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - String charSet = lexer.stringVal(); - lexer.nextToken(); - expr.getAttributes().put("USING", charSet); - - return primaryRest(expr); - } - } else if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { - lexer.nextToken(); - - if (lexer.token() == Token.EQ) { - lexer.nextToken(); - } - - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - String collate = lexer.stringVal(); - lexer.nextToken(); - - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.COLLATE, - new SQLIdentifierExpr(collate), JdbcConstants.MYSQL); - - return primaryRest(expr); - } else if (expr instanceof SQLVariantRefExpr) { - if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { - lexer.nextToken(); - - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - String collate = lexer.stringVal(); - lexer.nextToken(); - - expr.putAttribute("COLLATE", collate); - - return primaryRest(expr); - } - } else if (expr instanceof SQLIntegerExpr) { - SQLIntegerExpr intExpr = (SQLIntegerExpr) expr; - String binaryString = lexer.stringVal(); - if (intExpr.getNumber().intValue() == 0 && 
binaryString.startsWith("b")) { - lexer.nextToken(); - expr = new SQLBinaryExpr(binaryString.substring(1)); - - return primaryRest(expr); - } - } + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); } - if (lexer.token() == Token.LPAREN && expr instanceof SQLIdentifierExpr) { - SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; - String ident = identExpr.getName(); - - if ("EXTRACT".equalsIgnoreCase(ident)) { - lexer.nextToken(); - - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - String unitVal = lexer.stringVal(); - MySqlIntervalUnit unit = MySqlIntervalUnit.valueOf(unitVal.toUpperCase()); - lexer.nextToken(); - - accept(Token.FROM); - - SQLExpr value = expr(); - - MySqlExtractExpr extract = new MySqlExtractExpr(); - extract.setValue(value); - extract.setUnit(unit); - accept(Token.RPAREN); - - expr = extract; - - return primaryRest(expr); - } else if ("SUBSTRING".equalsIgnoreCase(ident)) { - lexer.nextToken(); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - for (; ; ) { - SQLExpr param = expr(); - methodInvokeExpr.addParameter(param); - - if (lexer.token() == Token.COMMA) { - lexer.nextToken(); - continue; - } else if (lexer.token() == Token.FROM) { - lexer.nextToken(); - SQLExpr from = expr(); - methodInvokeExpr.addParameter(from); - - if (lexer.token() == Token.FOR) { - lexer.nextToken(); - SQLExpr forExpr = expr(); - methodInvokeExpr.addParameter(forExpr); - } - break; - } else if (lexer.token() == Token.RPAREN) { - break; - } else { - throw new ParserException("Syntax error: " + lexer.token()); - } - } - - accept(Token.RPAREN); - expr = methodInvokeExpr; - - return primaryRest(expr); - } else if ("TRIM".equalsIgnoreCase(ident)) { - lexer.nextToken(); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - - if (lexer.token() == Token.IDENTIFIER) { - String flagVal = lexer.stringVal(); - if ("LEADING".equalsIgnoreCase(flagVal)) { - lexer.nextToken(); - methodInvokeExpr.getAttributes().put("TRIM_TYPE", "LEADING"); - } else if ("BOTH".equalsIgnoreCase(flagVal)) { - lexer.nextToken(); - methodInvokeExpr.getAttributes().put("TRIM_TYPE", "BOTH"); - } else if ("TRAILING".equalsIgnoreCase(flagVal)) { - lexer.nextToken(); - methodInvokeExpr.putAttribute("TRIM_TYPE", "TRAILING"); - } - } - - SQLExpr param = expr(); - methodInvokeExpr.addParameter(param); - - if (lexer.token() == Token.FROM) { - lexer.nextToken(); - SQLExpr from = expr(); - methodInvokeExpr.putAttribute("FROM", from); - } - - accept(Token.RPAREN); - expr = methodInvokeExpr; - - return primaryRest(expr); - } else if ("MATCH".equalsIgnoreCase(ident)) { - lexer.nextToken(); - MySqlMatchAgainstExpr matchAgainstExpr = new MySqlMatchAgainstExpr(); - - if (lexer.token() == Token.RPAREN) { - lexer.nextToken(); - } else { - exprList(matchAgainstExpr.getColumns(), matchAgainstExpr); - accept(Token.RPAREN); - } - - acceptIdentifier("AGAINST"); - - accept(Token.LPAREN); - SQLExpr against = primary(); - matchAgainstExpr.setAgainst(against); - - if (lexer.token() == Token.IN) { - lexer.nextToken(); - if (identifierEquals("NATURAL")) { - lexer.nextToken(); - acceptIdentifier("LANGUAGE"); - acceptIdentifier("MODE"); - if (lexer.token() == Token.WITH) { - lexer.nextToken(); - acceptIdentifier("QUERY"); - acceptIdentifier("EXPANSION"); - matchAgainstExpr.setSearchModifier( - MySqlMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE_WITH_QUERY_EXPANSION); - } else { - 
matchAgainstExpr.setSearchModifier( - MySqlMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE); - } - } else if (identifierEquals("BOOLEAN")) { - lexer.nextToken(); - acceptIdentifier("MODE"); - matchAgainstExpr.setSearchModifier(MySqlMatchAgainstExpr.SearchModifier.IN_BOOLEAN_MODE); - } else { - throw new ParserException("Syntax error: " + lexer.token()); - } - } else if (lexer.token() == Token.WITH) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - accept(Token.RPAREN); - - expr = matchAgainstExpr; - - return primaryRest(expr); - } else if ("CONVERT".equalsIgnoreCase(ident)) { - lexer.nextToken(); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - - if (lexer.token() != Token.RPAREN) { - exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); - } - - if (identifierEquals("USING")) { - lexer.nextToken(); - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - String charset = lexer.stringVal(); - lexer.nextToken(); - methodInvokeExpr.putAttribute("USING", charset); - } - - accept(Token.RPAREN); - - expr = methodInvokeExpr; - - return primaryRest(expr); - } else if ("POSITION".equalsIgnoreCase(ident)) { - accept(Token.LPAREN); - SQLExpr subStr = this.primary(); - accept(Token.IN); - SQLExpr str = this.expr(); - accept(Token.RPAREN); - - SQLMethodInvokeExpr locate = new SQLMethodInvokeExpr("LOCATE"); - locate.addParameter(subStr); - locate.addParameter(str); - - expr = locate; - return primaryRest(expr); - } - } + String unitVal = lexer.stringVal(); + MySqlIntervalUnit unit = MySqlIntervalUnit.valueOf(unitVal.toUpperCase()); + lexer.nextToken(); + + accept(Token.FROM); + + SQLExpr value = expr(); + + MySqlExtractExpr extract = new MySqlExtractExpr(); + extract.setValue(value); + extract.setUnit(unit); + accept(Token.RPAREN); + + expr = extract; + + return primaryRest(expr); + } else if ("SUBSTRING".equalsIgnoreCase(ident)) { + lexer.nextToken(); + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); + for (; ; ) { + SQLExpr param = expr(); + methodInvokeExpr.addParameter(param); - if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { + if (lexer.token() == Token.COMMA) { lexer.nextToken(); - MySqlUserName userName = new MySqlUserName(); - if (expr instanceof SQLCharExpr) { - userName.setUserName(((SQLCharExpr) expr).toString()); - } else { - userName.setUserName(((SQLIdentifierExpr) expr).getName()); - } + continue; + } else if (lexer.token() == Token.FROM) { + lexer.nextToken(); + SQLExpr from = expr(); + methodInvokeExpr.addParameter(from); - if (lexer.token() == Token.LITERAL_CHARS) { - userName.setHost("'" + lexer.stringVal() + "'"); - } else { - userName.setHost(lexer.stringVal()); + if (lexer.token() == Token.FOR) { + lexer.nextToken(); + SQLExpr forExpr = expr(); + methodInvokeExpr.addParameter(forExpr); } - lexer.nextToken(); - return userName; + break; + } else if (lexer.token() == Token.RPAREN) { + break; + } else { + throw new ParserException("Syntax error: " + lexer.token()); + } } - // - if (expr instanceof SQLMethodInvokeExpr && lexer.token() == Token.LBRACKET) { + accept(Token.RPAREN); + expr = methodInvokeExpr; + + return primaryRest(expr); + } else if ("TRIM".equalsIgnoreCase(ident)) { + lexer.nextToken(); + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); + + if (lexer.token() == Token.IDENTIFIER) { + String flagVal = lexer.stringVal(); + if ("LEADING".equalsIgnoreCase(flagVal)) { + lexer.nextToken(); + 
methodInvokeExpr.getAttributes().put("TRIM_TYPE", "LEADING"); + } else if ("BOTH".equalsIgnoreCase(flagVal)) { + lexer.nextToken(); + methodInvokeExpr.getAttributes().put("TRIM_TYPE", "BOTH"); + } else if ("TRAILING".equalsIgnoreCase(flagVal)) { lexer.nextToken(); - expr = bracketRest(expr); - return primaryRest(expr); + methodInvokeExpr.putAttribute("TRIM_TYPE", "TRAILING"); + } } - if (lexer.token() == Token.ERROR) { - throw new ParserException("Syntax error, token: " + lexer.token() + " " + lexer.stringVal() + ", pos: " - + lexer.pos()); - } + SQLExpr param = expr(); + methodInvokeExpr.addParameter(param); - /** - * When the druid parser parses the quoted field in SELECT clause, e.g. SELECT `b`.`lastname` FROM bank AS `b`, - * "`b`" is recognized as an identifier expr, and the token is DOT, then the next identifier "`lastname`" would - * be recognized as the property name of "`b`". The parser creates a SQLPropertyExpr with owner of "`b`" and - * property name of "`lastname`". - * - * The following block of code prevents this specific case to generate SQLPropertyExpr, but corrects the parser - * to generate a SQLIdentifierExpr with expr = "`b`.`lastname`". - */ - if (lexer.token() == Token.DOT && expr instanceof SQLIdentifierExpr) { - if (isQuoted(((SQLIdentifierExpr) expr).getName(), "`")) { - lexer.nextToken(); - ((SQLIdentifierExpr) expr).setName(((SQLIdentifierExpr) expr).getName() + "." + lexer.stringVal()); - lexer.nextToken(); - } + if (lexer.token() == Token.FROM) { + lexer.nextToken(); + SQLExpr from = expr(); + methodInvokeExpr.putAttribute("FROM", from); } - return super.primaryRest(expr); - } + accept(Token.RPAREN); + expr = methodInvokeExpr; - protected SQLExpr bracketRest(SQLExpr expr) { - Number index; + return primaryRest(expr); + } else if ("MATCH".equalsIgnoreCase(ident)) { + lexer.nextToken(); + MySqlMatchAgainstExpr matchAgainstExpr = new MySqlMatchAgainstExpr(); - if (lexer.token() == Token.LITERAL_INT) { - index = lexer.integerValue(); - lexer.nextToken(); + if (lexer.token() == Token.RPAREN) { + lexer.nextToken(); } else { - throw new ParserException("Syntax error : " + lexer.stringVal()); + exprList(matchAgainstExpr.getColumns(), matchAgainstExpr); + accept(Token.RPAREN); } - if (expr instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr methodInvokeExpr = (SQLMethodInvokeExpr) expr; - methodInvokeExpr.getParameters().add(new SQLIntegerExpr(index)); + acceptIdentifier("AGAINST"); + + accept(Token.LPAREN); + SQLExpr against = primary(); + matchAgainstExpr.setAgainst(against); + + if (lexer.token() == Token.IN) { + lexer.nextToken(); + if (identifierEquals("NATURAL")) { + lexer.nextToken(); + acceptIdentifier("LANGUAGE"); + acceptIdentifier("MODE"); + if (lexer.token() == Token.WITH) { + lexer.nextToken(); + acceptIdentifier("QUERY"); + acceptIdentifier("EXPANSION"); + matchAgainstExpr.setSearchModifier( + MySqlMatchAgainstExpr.SearchModifier + .IN_NATURAL_LANGUAGE_MODE_WITH_QUERY_EXPANSION); + } else { + matchAgainstExpr.setSearchModifier( + MySqlMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE); + } + } else if (identifierEquals("BOOLEAN")) { + lexer.nextToken(); + acceptIdentifier("MODE"); + matchAgainstExpr.setSearchModifier( + MySqlMatchAgainstExpr.SearchModifier.IN_BOOLEAN_MODE); + } else { + throw new ParserException("Syntax error: " + lexer.token()); + } + } else if (lexer.token() == Token.WITH) { + throw new ParserException("Syntax error: " + lexer.token()); } + + accept(Token.RPAREN); + + expr = matchAgainstExpr; + + return primaryRest(expr); + } 
else if ("CONVERT".equalsIgnoreCase(ident)) { lexer.nextToken(); - expr = primaryRest(expr); - return expr; - } + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - public SQLSelectParser createSelectParser() { - return new ElasticSqlSelectParser(this); - } + if (lexer.token() != Token.RPAREN) { + exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); + } - protected SQLExpr parseInterval() { - accept(Token.INTERVAL); + if (identifierEquals("USING")) { + lexer.nextToken(); + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } + String charset = lexer.stringVal(); + lexer.nextToken(); + methodInvokeExpr.putAttribute("USING", charset); + } - if (lexer.token() == Token.LPAREN) { - lexer.nextToken(); + accept(Token.RPAREN); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("INTERVAL"); - if (lexer.token() != Token.RPAREN) { - exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); - } + expr = methodInvokeExpr; - accept(Token.RPAREN); + return primaryRest(expr); + } else if ("POSITION".equalsIgnoreCase(ident)) { + accept(Token.LPAREN); + SQLExpr subStr = this.primary(); + accept(Token.IN); + SQLExpr str = this.expr(); + accept(Token.RPAREN); - return primaryRest(methodInvokeExpr); - } else { - SQLExpr value = expr(); + SQLMethodInvokeExpr locate = new SQLMethodInvokeExpr("LOCATE"); + locate.addParameter(subStr); + locate.addParameter(str); - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } + expr = locate; + return primaryRest(expr); + } + } - String unit = lexer.stringVal(); - lexer.nextToken(); + if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { + lexer.nextToken(); + MySqlUserName userName = new MySqlUserName(); + if (expr instanceof SQLCharExpr) { + userName.setUserName(((SQLCharExpr) expr).toString()); + } else { + userName.setUserName(((SQLIdentifierExpr) expr).getName()); + } + + if (lexer.token() == Token.LITERAL_CHARS) { + userName.setHost("'" + lexer.stringVal() + "'"); + } else { + userName.setHost(lexer.stringVal()); + } + lexer.nextToken(); + return userName; + } - MySqlIntervalExpr intervalExpr = new MySqlIntervalExpr(); - intervalExpr.setValue(value); - intervalExpr.setUnit(MySqlIntervalUnit.valueOf(unit.toUpperCase())); + // + if (expr instanceof SQLMethodInvokeExpr && lexer.token() == Token.LBRACKET) { + lexer.nextToken(); + expr = bracketRest(expr); + return primaryRest(expr); + } - return intervalExpr; - } + if (lexer.token() == Token.ERROR) { + throw new ParserException( + "Syntax error, token: " + + lexer.token() + + " " + + lexer.stringVal() + + ", pos: " + + lexer.pos()); + } + + /** + * When the druid parser parses the quoted field in SELECT clause, e.g. SELECT `b`.`lastname` + * FROM bank AS `b`, "`b`" is recognized as an identifier expr, and the token is DOT, then the + * next identifier "`lastname`" would be recognized as the property name of "`b`". The parser + * creates a SQLPropertyExpr with owner of "`b`" and property name of "`lastname`". + * + *
<p>
The following block of code prevents this specific case to generate SQLPropertyExpr, but + * corrects the parser to generate a SQLIdentifierExpr with expr = "`b`.`lastname`". + */ + if (lexer.token() == Token.DOT && expr instanceof SQLIdentifierExpr) { + if (isQuoted(((SQLIdentifierExpr) expr).getName(), "`")) { + lexer.nextToken(); + ((SQLIdentifierExpr) expr) + .setName(((SQLIdentifierExpr) expr).getName() + "." + lexer.stringVal()); + lexer.nextToken(); + } } - public SQLColumnDefinition parseColumn() { - MySqlSQLColumnDefinition column = new MySqlSQLColumnDefinition(); - column.setName(name()); - column.setDataType(parseDataType()); + return super.primaryRest(expr); + } + + protected SQLExpr bracketRest(SQLExpr expr) { + Number index; - return parseColumnRest(column); + if (lexer.token() == Token.LITERAL_INT) { + index = lexer.integerValue(); + lexer.nextToken(); + } else { + throw new ParserException("Syntax error : " + lexer.stringVal()); } - public SQLColumnDefinition parseColumnRest(SQLColumnDefinition column) { - if (lexer.token() == Token.ON) { - lexer.nextToken(); - accept(Token.UPDATE); - SQLExpr expr = this.expr(); - ((MySqlSQLColumnDefinition) column).setOnUpdate(expr); - } + if (expr instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr methodInvokeExpr = (SQLMethodInvokeExpr) expr; + methodInvokeExpr.getParameters().add(new SQLIntegerExpr(index)); + } + lexer.nextToken(); + expr = primaryRest(expr); + return expr; + } - if (identifierEquals("AUTO_INCREMENT")) { - lexer.nextToken(); - if (column instanceof MySqlSQLColumnDefinition) { - ((MySqlSQLColumnDefinition) column).setAutoIncrement(true); - } - return parseColumnRest(column); - } + public SQLSelectParser createSelectParser() { + return new ElasticSqlSelectParser(this); + } - if (identifierEquals("precision") && column.getDataType().getName().equalsIgnoreCase("double")) { - lexer.nextToken(); - } + protected SQLExpr parseInterval() { + accept(Token.INTERVAL); - if (identifierEquals("PARTITION")) { - throw new ParserException("syntax error " + lexer.token() + " " + lexer.stringVal()); - } + if (lexer.token() == Token.LPAREN) { + lexer.nextToken(); - if (identifierEquals("STORAGE")) { - lexer.nextToken(); - SQLExpr expr = expr(); - if (column instanceof MySqlSQLColumnDefinition) { - ((MySqlSQLColumnDefinition) column).setStorage(expr); - } - } + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("INTERVAL"); + if (lexer.token() != Token.RPAREN) { + exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); + } - super.parseColumnRest(column); + accept(Token.RPAREN); - return column; + return primaryRest(methodInvokeExpr); + } else { + SQLExpr value = expr(); + + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } + + String unit = lexer.stringVal(); + lexer.nextToken(); + + MySqlIntervalExpr intervalExpr = new MySqlIntervalExpr(); + intervalExpr.setValue(value); + intervalExpr.setUnit(MySqlIntervalUnit.valueOf(unit.toUpperCase())); + + return intervalExpr; + } + } + + public SQLColumnDefinition parseColumn() { + MySqlSQLColumnDefinition column = new MySqlSQLColumnDefinition(); + column.setName(name()); + column.setDataType(parseDataType()); + + return parseColumnRest(column); + } + + public SQLColumnDefinition parseColumnRest(SQLColumnDefinition column) { + if (lexer.token() == Token.ON) { + lexer.nextToken(); + accept(Token.UPDATE); + SQLExpr expr = this.expr(); + ((MySqlSQLColumnDefinition) column).setOnUpdate(expr); } - protected SQLDataType 
parseDataTypeRest(SQLDataType dataType) { - super.parseDataTypeRest(dataType); + if (identifierEquals("AUTO_INCREMENT")) { + lexer.nextToken(); + if (column instanceof MySqlSQLColumnDefinition) { + ((MySqlSQLColumnDefinition) column).setAutoIncrement(true); + } + return parseColumnRest(column); + } - if (identifierEquals("UNSIGNED")) { - lexer.nextToken(); - dataType.getAttributes().put("UNSIGNED", true); - } + if (identifierEquals("precision") + && column.getDataType().getName().equalsIgnoreCase("double")) { + lexer.nextToken(); + } - if (identifierEquals("ZEROFILL")) { - lexer.nextToken(); - dataType.getAttributes().put("ZEROFILL", true); - } + if (identifierEquals("PARTITION")) { + throw new ParserException("syntax error " + lexer.token() + " " + lexer.stringVal()); + } - return dataType; + if (identifierEquals("STORAGE")) { + lexer.nextToken(); + SQLExpr expr = expr(); + if (column instanceof MySqlSQLColumnDefinition) { + ((MySqlSQLColumnDefinition) column).setStorage(expr); + } } - public SQLExpr orRest(SQLExpr expr) { + super.parseColumnRest(column); - for (; ; ) { - if (lexer.token() == Token.OR || lexer.token() == Token.BARBAR) { - lexer.nextToken(); - SQLExpr rightExp = and(); + return column; + } - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanOr, rightExp, JdbcConstants.MYSQL); - } else if (lexer.token() == Token.XOR) { - lexer.nextToken(); - SQLExpr rightExp = and(); + protected SQLDataType parseDataTypeRest(SQLDataType dataType) { + super.parseDataTypeRest(dataType); - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanXor, rightExp, JdbcConstants.MYSQL); - } else { - break; - } - } + if (identifierEquals("UNSIGNED")) { + lexer.nextToken(); + dataType.getAttributes().put("UNSIGNED", true); + } - return expr; + if (identifierEquals("ZEROFILL")) { + lexer.nextToken(); + dataType.getAttributes().put("ZEROFILL", true); } - public SQLExpr additiveRest(SQLExpr expr) { - if (lexer.token() == Token.PLUS) { - lexer.nextToken(); - SQLExpr rightExp = multiplicative(); + return dataType; + } - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Add, rightExp, JdbcConstants.MYSQL); - expr = additiveRest(expr); - } else if (lexer.token() == Token.SUB) { - lexer.nextToken(); - SQLExpr rightExp = multiplicative(); + public SQLExpr orRest(SQLExpr expr) { - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Subtract, rightExp, JdbcConstants.MYSQL); - expr = additiveRest(expr); - } + for (; ; ) { + if (lexer.token() == Token.OR || lexer.token() == Token.BARBAR) { + lexer.nextToken(); + SQLExpr rightExp = and(); + + expr = + new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanOr, rightExp, JdbcConstants.MYSQL); + } else if (lexer.token() == Token.XOR) { + lexer.nextToken(); + SQLExpr rightExp = and(); - return expr; + expr = + new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanXor, rightExp, JdbcConstants.MYSQL); + } else { + break; + } } - public SQLAssignItem parseAssignItem() { - SQLAssignItem item = new SQLAssignItem(); + return expr; + } - SQLExpr var = primary(); + public SQLExpr additiveRest(SQLExpr expr) { + if (lexer.token() == Token.PLUS) { + lexer.nextToken(); + SQLExpr rightExp = multiplicative(); - String ident = null; - if (var instanceof SQLIdentifierExpr) { - ident = ((SQLIdentifierExpr) var).getName(); + expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Add, rightExp, JdbcConstants.MYSQL); + expr = additiveRest(expr); + } else if (lexer.token() == Token.SUB) { + lexer.nextToken(); + SQLExpr rightExp = multiplicative(); - if ("GLOBAL".equalsIgnoreCase(ident)) { - 
ident = lexer.stringVal(); - lexer.nextToken(); - var = new SQLVariantRefExpr(ident, true); - } else if ("SESSION".equalsIgnoreCase(ident)) { - ident = lexer.stringVal(); - lexer.nextToken(); - var = new SQLVariantRefExpr(ident, false); - } else { - var = new SQLVariantRefExpr(ident); - } - } + expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Subtract, rightExp, JdbcConstants.MYSQL); + expr = additiveRest(expr); + } - if ("NAMES".equalsIgnoreCase(ident)) { - // skip - } else if ("CHARACTER".equalsIgnoreCase(ident)) { - var = new SQLIdentifierExpr("CHARACTER SET"); - accept(Token.SET); - if (lexer.token() == Token.EQ) { - lexer.nextToken(); - } - } else { - if (lexer.token() == Token.COLONEQ) { - lexer.nextToken(); - } else { - accept(Token.EQ); - } - } + return expr; + } + + public SQLAssignItem parseAssignItem() { + SQLAssignItem item = new SQLAssignItem(); - item.setValue(this.expr()); + SQLExpr var = primary(); - item.setTarget(var); - return item; + String ident = null; + if (var instanceof SQLIdentifierExpr) { + ident = ((SQLIdentifierExpr) var).getName(); + + if ("GLOBAL".equalsIgnoreCase(ident)) { + ident = lexer.stringVal(); + lexer.nextToken(); + var = new SQLVariantRefExpr(ident, true); + } else if ("SESSION".equalsIgnoreCase(ident)) { + ident = lexer.stringVal(); + lexer.nextToken(); + var = new SQLVariantRefExpr(ident, false); + } else { + var = new SQLVariantRefExpr(ident); + } } - public SQLName nameRest(SQLName name) { - if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { - lexer.nextToken(); - MySqlUserName userName = new MySqlUserName(); - userName.setUserName(((SQLIdentifierExpr) name).getName()); + if ("NAMES".equalsIgnoreCase(ident)) { + // skip + } else if ("CHARACTER".equalsIgnoreCase(ident)) { + var = new SQLIdentifierExpr("CHARACTER SET"); + accept(Token.SET); + if (lexer.token() == Token.EQ) { + lexer.nextToken(); + } + } else { + if (lexer.token() == Token.COLONEQ) { + lexer.nextToken(); + } else { + accept(Token.EQ); + } + } - if (lexer.token() == Token.LITERAL_CHARS) { - userName.setHost("'" + lexer.stringVal() + "'"); - } else { - userName.setHost(lexer.stringVal()); - } - lexer.nextToken(); - return userName; - } - return super.nameRest(name); + item.setValue(this.expr()); + + item.setTarget(var); + return item; + } + + public SQLName nameRest(SQLName name) { + if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { + lexer.nextToken(); + MySqlUserName userName = new MySqlUserName(); + userName.setUserName(((SQLIdentifierExpr) name).getName()); + + if (lexer.token() == Token.LITERAL_CHARS) { + userName.setHost("'" + lexer.stringVal() + "'"); + } else { + userName.setHost(lexer.stringVal()); + } + lexer.nextToken(); + return userName; } + return super.nameRest(name); + } - public MySqlSelectQueryBlock.Limit parseLimit() { - if (lexer.token() == Token.LIMIT) { - lexer.nextToken(); + public MySqlSelectQueryBlock.Limit parseLimit() { + if (lexer.token() == Token.LIMIT) { + lexer.nextToken(); - MySqlSelectQueryBlock.Limit limit = new MySqlSelectQueryBlock.Limit(); - - SQLExpr temp = this.expr(); - if (lexer.token() == (Token.COMMA)) { - limit.setOffset(temp); - lexer.nextToken(); - limit.setRowCount(this.expr()); - } else if (identifierEquals("OFFSET")) { - limit.setRowCount(temp); - lexer.nextToken(); - limit.setOffset(this.expr()); - } else { - limit.setRowCount(temp); - } - return limit; - } + MySqlSelectQueryBlock.Limit limit = new MySqlSelectQueryBlock.Limit(); - return null; + SQLExpr temp = this.expr(); + if (lexer.token() 
== (Token.COMMA)) { + limit.setOffset(temp); + lexer.nextToken(); + limit.setRowCount(this.expr()); + } else if (identifierEquals("OFFSET")) { + limit.setRowCount(temp); + lexer.nextToken(); + limit.setOffset(this.expr()); + } else { + limit.setRowCount(temp); + } + return limit; } - @Override - public MySqlPrimaryKey parsePrimaryKey() { - accept(Token.PRIMARY); - accept(Token.KEY); + return null; + } - MySqlPrimaryKey primaryKey = new MySqlPrimaryKey(); + @Override + public MySqlPrimaryKey parsePrimaryKey() { + accept(Token.PRIMARY); + accept(Token.KEY); - if (identifierEquals("USING")) { - lexer.nextToken(); - primaryKey.setIndexType(lexer.stringVal()); - lexer.nextToken(); - } + MySqlPrimaryKey primaryKey = new MySqlPrimaryKey(); - accept(Token.LPAREN); - for (; ; ) { - primaryKey.getColumns().add(this.expr()); - if (!(lexer.token() == (Token.COMMA))) { - break; - } else { - lexer.nextToken(); - } - } - accept(Token.RPAREN); + if (identifierEquals("USING")) { + lexer.nextToken(); + primaryKey.setIndexType(lexer.stringVal()); + lexer.nextToken(); + } - return primaryKey; + accept(Token.LPAREN); + for (; ; ) { + primaryKey.getColumns().add(this.expr()); + if (!(lexer.token() == (Token.COMMA))) { + break; + } else { + lexer.nextToken(); + } } + accept(Token.RPAREN); - public MySqlUnique parseUnique() { - accept(Token.UNIQUE); + return primaryKey; + } - if (lexer.token() == Token.KEY) { - lexer.nextToken(); - } + public MySqlUnique parseUnique() { + accept(Token.UNIQUE); - if (lexer.token() == Token.INDEX) { - lexer.nextToken(); - } + if (lexer.token() == Token.KEY) { + lexer.nextToken(); + } - MySqlUnique unique = new MySqlUnique(); + if (lexer.token() == Token.INDEX) { + lexer.nextToken(); + } - if (lexer.token() != Token.LPAREN) { - SQLName indexName = name(); - unique.setIndexName(indexName); - } + MySqlUnique unique = new MySqlUnique(); - accept(Token.LPAREN); - for (; ; ) { - unique.getColumns().add(this.expr()); - if (!(lexer.token() == (Token.COMMA))) { - break; - } else { - lexer.nextToken(); - } - } - accept(Token.RPAREN); + if (lexer.token() != Token.LPAREN) { + SQLName indexName = name(); + unique.setIndexName(indexName); + } - if (identifierEquals("USING")) { - lexer.nextToken(); - unique.setIndexType(lexer.stringVal()); - lexer.nextToken(); - } + accept(Token.LPAREN); + for (; ; ) { + unique.getColumns().add(this.expr()); + if (!(lexer.token() == (Token.COMMA))) { + break; + } else { + lexer.nextToken(); + } + } + accept(Token.RPAREN); - return unique; + if (identifierEquals("USING")) { + lexer.nextToken(); + unique.setIndexType(lexer.stringVal()); + lexer.nextToken(); } - public MysqlForeignKey parseForeignKey() { - accept(Token.FOREIGN); - accept(Token.KEY); + return unique; + } - MysqlForeignKey fk = new MysqlForeignKey(); + public MysqlForeignKey parseForeignKey() { + accept(Token.FOREIGN); + accept(Token.KEY); - if (lexer.token() != Token.LPAREN) { - SQLName indexName = name(); - fk.setIndexName(indexName); - } + MysqlForeignKey fk = new MysqlForeignKey(); - accept(Token.LPAREN); - this.names(fk.getReferencingColumns()); - accept(Token.RPAREN); + if (lexer.token() != Token.LPAREN) { + SQLName indexName = name(); + fk.setIndexName(indexName); + } - accept(Token.REFERENCES); + accept(Token.LPAREN); + this.names(fk.getReferencingColumns()); + accept(Token.RPAREN); - fk.setReferencedTableName(this.name()); + accept(Token.REFERENCES); - accept(Token.LPAREN); - this.names(fk.getReferencedColumns()); - accept(Token.RPAREN); + fk.setReferencedTableName(this.name()); - if 
(identifierEquals("MATCH")) { - if (identifierEquals("FULL")) { - fk.setReferenceMatch(MysqlForeignKey.Match.FULL); - } else if (identifierEquals("PARTIAL")) { - fk.setReferenceMatch(MysqlForeignKey.Match.PARTIAL); - } else if (identifierEquals("SIMPLE")) { - fk.setReferenceMatch(MysqlForeignKey.Match.SIMPLE); - } - } + accept(Token.LPAREN); + this.names(fk.getReferencedColumns()); + accept(Token.RPAREN); - if (lexer.token() == Token.ON) { - lexer.nextToken(); - if (lexer.token() == Token.DELETE) { - fk.setReferenceOn(MysqlForeignKey.On.DELETE); - } else if (lexer.token() == Token.UPDATE) { - fk.setReferenceOn(MysqlForeignKey.On.UPDATE); - } else { - throw new ParserException("Syntax error, expect DELETE or UPDATE, actual " + lexer.token() + " " - + lexer.stringVal()); - } - lexer.nextToken(); + if (identifierEquals("MATCH")) { + if (identifierEquals("FULL")) { + fk.setReferenceMatch(MysqlForeignKey.Match.FULL); + } else if (identifierEquals("PARTIAL")) { + fk.setReferenceMatch(MysqlForeignKey.Match.PARTIAL); + } else if (identifierEquals("SIMPLE")) { + fk.setReferenceMatch(MysqlForeignKey.Match.SIMPLE); + } + } - if (lexer.token() == Token.RESTRICT) { - fk.setReferenceOption(MysqlForeignKey.Option.RESTRICT); - } else if (identifierEquals("CASCADE")) { - fk.setReferenceOption(MysqlForeignKey.Option.CASCADE); - } else if (lexer.token() == Token.SET) { - accept(Token.NULL); - fk.setReferenceOption(MysqlForeignKey.Option.SET_NULL); - } else if (identifierEquals("ON")) { - lexer.nextToken(); - if (identifierEquals("ACTION")) { - fk.setReferenceOption(MysqlForeignKey.Option.NO_ACTION); - } else { - throw new ParserException("Syntax error, expect ACTION, actual " + lexer.token() + " " - + lexer.stringVal()); - } - } - lexer.nextToken(); + if (lexer.token() == Token.ON) { + lexer.nextToken(); + if (lexer.token() == Token.DELETE) { + fk.setReferenceOn(MysqlForeignKey.On.DELETE); + } else if (lexer.token() == Token.UPDATE) { + fk.setReferenceOn(MysqlForeignKey.On.UPDATE); + } else { + throw new ParserException( + "Syntax error, expect DELETE or UPDATE, actual " + + lexer.token() + + " " + + lexer.stringVal()); + } + lexer.nextToken(); + + if (lexer.token() == Token.RESTRICT) { + fk.setReferenceOption(MysqlForeignKey.Option.RESTRICT); + } else if (identifierEquals("CASCADE")) { + fk.setReferenceOption(MysqlForeignKey.Option.CASCADE); + } else if (lexer.token() == Token.SET) { + accept(Token.NULL); + fk.setReferenceOption(MysqlForeignKey.Option.SET_NULL); + } else if (identifierEquals("ON")) { + lexer.nextToken(); + if (identifierEquals("ACTION")) { + fk.setReferenceOption(MysqlForeignKey.Option.NO_ACTION); + } else { + throw new ParserException( + "Syntax error, expect ACTION, actual " + lexer.token() + " " + lexer.stringVal()); } - return fk; + } + lexer.nextToken(); } + return fk; + } - protected SQLAggregateExpr parseAggregateExprRest(SQLAggregateExpr aggregateExpr) { - if (lexer.token() == Token.ORDER) { - SQLOrderBy orderBy = this.parseOrderBy(); - aggregateExpr.putAttribute("ORDER BY", orderBy); - } - if (identifierEquals("SEPARATOR")) { - lexer.nextToken(); + protected SQLAggregateExpr parseAggregateExprRest(SQLAggregateExpr aggregateExpr) { + if (lexer.token() == Token.ORDER) { + SQLOrderBy orderBy = this.parseOrderBy(); + aggregateExpr.putAttribute("ORDER BY", orderBy); + } + if (identifierEquals("SEPARATOR")) { + lexer.nextToken(); - SQLExpr seperator = this.primary(); + SQLExpr seperator = this.primary(); - aggregateExpr.putAttribute("SEPARATOR", seperator); - } - return aggregateExpr; + 
aggregateExpr.putAttribute("SEPARATOR", seperator); } + return aggregateExpr; + } - public MySqlSelectGroupByExpr parseSelectGroupByItem() { - MySqlSelectGroupByExpr item = new MySqlSelectGroupByExpr(); - - item.setExpr(expr()); + public MySqlSelectGroupByExpr parseSelectGroupByItem() { + MySqlSelectGroupByExpr item = new MySqlSelectGroupByExpr(); - if (lexer.token() == Token.ASC) { - lexer.nextToken(); - item.setType(SQLOrderingSpecification.ASC); - } else if (lexer.token() == Token.DESC) { - lexer.nextToken(); - item.setType(SQLOrderingSpecification.DESC); - } + item.setExpr(expr()); - return item; + if (lexer.token() == Token.ASC) { + lexer.nextToken(); + item.setType(SQLOrderingSpecification.ASC); + } else if (lexer.token() == Token.DESC) { + lexer.nextToken(); + item.setType(SQLOrderingSpecification.DESC); } + return item; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java index 2038aa54ef..c405d90878 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -29,313 +28,311 @@ import com.alibaba.druid.sql.parser.Token; import org.opensearch.sql.legacy.exception.SqlFeatureNotImplementedException; -/** - * Created by allwefantasy on 8/19/16. - */ +/** Created by allwefantasy on 8/19/16. */ public class ElasticSqlSelectParser extends SQLSelectParser { - public ElasticSqlSelectParser(SQLExprParser exprParser) { - super(exprParser); + public ElasticSqlSelectParser(SQLExprParser exprParser) { + super(exprParser); + } + + @Override + public SQLSelectQuery query() { + if (lexer.token() == (Token.LPAREN)) { + lexer.nextToken(); + + SQLSelectQuery select = query(); + accept(Token.RPAREN); + + return queryRest(select); } - @Override - public SQLSelectQuery query() { - if (lexer.token() == (Token.LPAREN)) { - lexer.nextToken(); + MySqlSelectQueryBlock queryBlock = new MySqlSelectQueryBlock(); + + if (lexer.token() == Token.SELECT) { + lexer.nextToken(); + + if (lexer.token() == Token.HINT) { + this.exprParser.parseHints(queryBlock.getHints()); + } + + if (lexer.token() == Token.COMMENT) { + lexer.nextToken(); + } + + if (lexer.token() == (Token.DISTINCT)) { + queryBlock.setDistionOption(SQLSetQuantifier.DISTINCT); + lexer.nextToken(); + } else if (identifierEquals("DISTINCTROW")) { + queryBlock.setDistionOption(SQLSetQuantifier.DISTINCTROW); + lexer.nextToken(); + } else if (lexer.token() == (Token.ALL)) { + queryBlock.setDistionOption(SQLSetQuantifier.ALL); + lexer.nextToken(); + } + + if (identifierEquals("HIGH_PRIORITY")) { + queryBlock.setHignPriority(true); + lexer.nextToken(); + } + + if (identifierEquals("STRAIGHT_JOIN")) { + queryBlock.setStraightJoin(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_SMALL_RESULT")) { + queryBlock.setSmallResult(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_BIG_RESULT")) { + queryBlock.setBigResult(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_BUFFER_RESULT")) { + queryBlock.setBufferResult(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_CACHE")) { + queryBlock.setCache(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_NO_CACHE")) { + queryBlock.setCache(false); + lexer.nextToken(); + } + 
+ if (identifierEquals("SQL_CALC_FOUND_ROWS")) { + queryBlock.setCalcFoundRows(true); + lexer.nextToken(); + } + + parseSelectList(queryBlock); + + parseInto(queryBlock); + } - SQLSelectQuery select = query(); - accept(Token.RPAREN); + parseFrom(queryBlock); - return queryRest(select); - } + parseWhere(queryBlock); - MySqlSelectQueryBlock queryBlock = new MySqlSelectQueryBlock(); + parseGroupBy(queryBlock); - if (lexer.token() == Token.SELECT) { - lexer.nextToken(); + queryBlock.setOrderBy(this.exprParser.parseOrderBy()); - if (lexer.token() == Token.HINT) { - this.exprParser.parseHints(queryBlock.getHints()); - } - - if (lexer.token() == Token.COMMENT) { - lexer.nextToken(); - } - - if (lexer.token() == (Token.DISTINCT)) { - queryBlock.setDistionOption(SQLSetQuantifier.DISTINCT); - lexer.nextToken(); - } else if (identifierEquals("DISTINCTROW")) { - queryBlock.setDistionOption(SQLSetQuantifier.DISTINCTROW); - lexer.nextToken(); - } else if (lexer.token() == (Token.ALL)) { - queryBlock.setDistionOption(SQLSetQuantifier.ALL); - lexer.nextToken(); - } - - if (identifierEquals("HIGH_PRIORITY")) { - queryBlock.setHignPriority(true); - lexer.nextToken(); - } - - if (identifierEquals("STRAIGHT_JOIN")) { - queryBlock.setStraightJoin(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_SMALL_RESULT")) { - queryBlock.setSmallResult(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_BIG_RESULT")) { - queryBlock.setBigResult(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_BUFFER_RESULT")) { - queryBlock.setBufferResult(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_CACHE")) { - queryBlock.setCache(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_NO_CACHE")) { - queryBlock.setCache(false); - lexer.nextToken(); - } - - if (identifierEquals("SQL_CALC_FOUND_ROWS")) { - queryBlock.setCalcFoundRows(true); - lexer.nextToken(); - } - - parseSelectList(queryBlock); - - parseInto(queryBlock); - } + if (lexer.token() == Token.LIMIT) { + queryBlock.setLimit(parseLimit()); + } - parseFrom(queryBlock); + if (lexer.token() == Token.PROCEDURE) { + lexer.nextToken(); + throw new SqlFeatureNotImplementedException("Unsupported feature: " + Token.PROCEDURE.name); + } - parseWhere(queryBlock); + parseInto(queryBlock); - parseGroupBy(queryBlock); + if (lexer.token() == Token.FOR) { + lexer.nextToken(); + accept(Token.UPDATE); - queryBlock.setOrderBy(this.exprParser.parseOrderBy()); + queryBlock.setForUpdate(true); + } - if (lexer.token() == Token.LIMIT) { - queryBlock.setLimit(parseLimit()); - } + if (lexer.token() == Token.LOCK) { + lexer.nextToken(); + accept(Token.IN); + acceptIdentifier("SHARE"); + acceptIdentifier("MODE"); + queryBlock.setLockInShareMode(true); + } - if (lexer.token() == Token.PROCEDURE) { - lexer.nextToken(); - throw new SqlFeatureNotImplementedException("Unsupported feature: " + Token.PROCEDURE.name); - } + return queryRest(queryBlock); + } + + protected void parseInto(SQLSelectQueryBlock queryBlock) { + if (lexer.token() == (Token.INTO)) { + lexer.nextToken(); + + if (identifierEquals("OUTFILE")) { + lexer.nextToken(); + + MySqlOutFileExpr outFile = new MySqlOutFileExpr(); + outFile.setFile(expr()); + + queryBlock.setInto(outFile); + + if (identifierEquals("FIELDS") || identifierEquals("COLUMNS")) { + lexer.nextToken(); - parseInto(queryBlock); + if (identifierEquals("TERMINATED")) { + lexer.nextToken(); + accept(Token.BY); + } + outFile.setColumnsTerminatedBy((SQLLiteralExpr) expr()); - if (lexer.token() == Token.FOR) { + if 
(identifierEquals("OPTIONALLY")) { lexer.nextToken(); - accept(Token.UPDATE); + outFile.setColumnsEnclosedOptionally(true); + } - queryBlock.setForUpdate(true); - } + if (identifierEquals("ENCLOSED")) { + lexer.nextToken(); + accept(Token.BY); + outFile.setColumnsEnclosedBy((SQLLiteralExpr) expr()); + } - if (lexer.token() == Token.LOCK) { + if (identifierEquals("ESCAPED")) { lexer.nextToken(); - accept(Token.IN); - acceptIdentifier("SHARE"); - acceptIdentifier("MODE"); - queryBlock.setLockInShareMode(true); + accept(Token.BY); + outFile.setColumnsEscaped((SQLLiteralExpr) expr()); + } } - return queryRest(queryBlock); - } + if (identifierEquals("LINES")) { + lexer.nextToken(); - protected void parseInto(SQLSelectQueryBlock queryBlock) { - if (lexer.token() == (Token.INTO)) { + if (identifierEquals("STARTING")) { lexer.nextToken(); - - if (identifierEquals("OUTFILE")) { - lexer.nextToken(); - - MySqlOutFileExpr outFile = new MySqlOutFileExpr(); - outFile.setFile(expr()); - - queryBlock.setInto(outFile); - - if (identifierEquals("FIELDS") || identifierEquals("COLUMNS")) { - lexer.nextToken(); - - if (identifierEquals("TERMINATED")) { - lexer.nextToken(); - accept(Token.BY); - } - outFile.setColumnsTerminatedBy((SQLLiteralExpr) expr()); - - if (identifierEquals("OPTIONALLY")) { - lexer.nextToken(); - outFile.setColumnsEnclosedOptionally(true); - } - - if (identifierEquals("ENCLOSED")) { - lexer.nextToken(); - accept(Token.BY); - outFile.setColumnsEnclosedBy((SQLLiteralExpr) expr()); - } - - if (identifierEquals("ESCAPED")) { - lexer.nextToken(); - accept(Token.BY); - outFile.setColumnsEscaped((SQLLiteralExpr) expr()); - } - } - - if (identifierEquals("LINES")) { - lexer.nextToken(); - - if (identifierEquals("STARTING")) { - lexer.nextToken(); - accept(Token.BY); - outFile.setLinesStartingBy((SQLLiteralExpr) expr()); - } else { - identifierEquals("TERMINATED"); - lexer.nextToken(); - accept(Token.BY); - outFile.setLinesTerminatedBy((SQLLiteralExpr) expr()); - } - } - } else { - queryBlock.setInto(this.exprParser.name()); - } + accept(Token.BY); + outFile.setLinesStartingBy((SQLLiteralExpr) expr()); + } else { + identifierEquals("TERMINATED"); + lexer.nextToken(); + accept(Token.BY); + outFile.setLinesTerminatedBy((SQLLiteralExpr) expr()); + } } + } else { + queryBlock.setInto(this.exprParser.name()); + } } + } - protected void parseGroupBy(SQLSelectQueryBlock queryBlock) { - SQLSelectGroupByClause groupBy = null; + protected void parseGroupBy(SQLSelectQueryBlock queryBlock) { + SQLSelectGroupByClause groupBy = null; - if (lexer.token() == Token.GROUP) { - groupBy = new SQLSelectGroupByClause(); + if (lexer.token() == Token.GROUP) { + groupBy = new SQLSelectGroupByClause(); - lexer.nextToken(); - accept(Token.BY); + lexer.nextToken(); + accept(Token.BY); - while (true) { - groupBy.addItem(this.getExprParser().parseSelectGroupByItem()); - if (!(lexer.token() == (Token.COMMA))) { - break; - } - lexer.nextToken(); - } - - if (lexer.token() == Token.WITH) { - lexer.nextToken(); - acceptIdentifier("ROLLUP"); - - MySqlSelectGroupBy mySqlGroupBy = new MySqlSelectGroupBy(); - for (SQLExpr sqlExpr : groupBy.getItems()) { - mySqlGroupBy.addItem(sqlExpr); - } - mySqlGroupBy.setRollUp(true); - - groupBy = mySqlGroupBy; - } + while (true) { + groupBy.addItem(this.getExprParser().parseSelectGroupByItem()); + if (!(lexer.token() == (Token.COMMA))) { + break; } + lexer.nextToken(); + } - if (lexer.token() == Token.HAVING) { - lexer.nextToken(); + if (lexer.token() == Token.WITH) { + lexer.nextToken(); + 
acceptIdentifier("ROLLUP"); - if (groupBy == null) { - groupBy = new SQLSelectGroupByClause(); - } - groupBy.setHaving(this.exprParser.expr()); + MySqlSelectGroupBy mySqlGroupBy = new MySqlSelectGroupBy(); + for (SQLExpr sqlExpr : groupBy.getItems()) { + mySqlGroupBy.addItem(sqlExpr); } + mySqlGroupBy.setRollUp(true); - queryBlock.setGroupBy(groupBy); + groupBy = mySqlGroupBy; + } } - protected SQLTableSource parseTableSourceRest(SQLTableSource tableSource) { - if (identifierEquals("USING")) { - return tableSource; - } + if (lexer.token() == Token.HAVING) { + lexer.nextToken(); - if (lexer.token() == Token.USE) { - lexer.nextToken(); - MySqlUseIndexHint hint = new MySqlUseIndexHint(); - parseIndexHint(hint); - tableSource.getHints().add(hint); - } + if (groupBy == null) { + groupBy = new SQLSelectGroupByClause(); + } + groupBy.setHaving(this.exprParser.expr()); + } - if (identifierEquals("IGNORE")) { - lexer.nextToken(); - MySqlIgnoreIndexHint hint = new MySqlIgnoreIndexHint(); - parseIndexHint(hint); - tableSource.getHints().add(hint); - } + queryBlock.setGroupBy(groupBy); + } - if (identifierEquals("FORCE")) { - lexer.nextToken(); - MySqlForceIndexHint hint = new MySqlForceIndexHint(); - parseIndexHint(hint); - tableSource.getHints().add(hint); - } + protected SQLTableSource parseTableSourceRest(SQLTableSource tableSource) { + if (identifierEquals("USING")) { + return tableSource; + } - return super.parseTableSourceRest(tableSource); + if (lexer.token() == Token.USE) { + lexer.nextToken(); + MySqlUseIndexHint hint = new MySqlUseIndexHint(); + parseIndexHint(hint); + tableSource.getHints().add(hint); } - private void parseIndexHint(MySqlIndexHintImpl hint) { - if (lexer.token() == Token.INDEX) { - lexer.nextToken(); - } else { - accept(Token.KEY); - } + if (identifierEquals("IGNORE")) { + lexer.nextToken(); + MySqlIgnoreIndexHint hint = new MySqlIgnoreIndexHint(); + parseIndexHint(hint); + tableSource.getHints().add(hint); + } - if (lexer.token() == Token.FOR) { - lexer.nextToken(); + if (identifierEquals("FORCE")) { + lexer.nextToken(); + MySqlForceIndexHint hint = new MySqlForceIndexHint(); + parseIndexHint(hint); + tableSource.getHints().add(hint); + } - if (lexer.token() == Token.JOIN) { - lexer.nextToken(); - hint.setOption(MySqlIndexHint.Option.JOIN); - } else if (lexer.token() == Token.ORDER) { - lexer.nextToken(); - accept(Token.BY); - hint.setOption(MySqlIndexHint.Option.ORDER_BY); - } else { - accept(Token.GROUP); - accept(Token.BY); - hint.setOption(MySqlIndexHint.Option.GROUP_BY); - } - } + return super.parseTableSourceRest(tableSource); + } - accept(Token.LPAREN); - if (lexer.token() == Token.PRIMARY) { - lexer.nextToken(); - hint.getIndexList().add(new SQLIdentifierExpr("PRIMARY")); - } else { - this.exprParser.names(hint.getIndexList()); - } - accept(Token.RPAREN); + private void parseIndexHint(MySqlIndexHintImpl hint) { + if (lexer.token() == Token.INDEX) { + lexer.nextToken(); + } else { + accept(Token.KEY); } - protected MySqlUnionQuery createSQLUnionQuery() { - return new MySqlUnionQuery(); + if (lexer.token() == Token.FOR) { + lexer.nextToken(); + + if (lexer.token() == Token.JOIN) { + lexer.nextToken(); + hint.setOption(MySqlIndexHint.Option.JOIN); + } else if (lexer.token() == Token.ORDER) { + lexer.nextToken(); + accept(Token.BY); + hint.setOption(MySqlIndexHint.Option.ORDER_BY); + } else { + accept(Token.GROUP); + accept(Token.BY); + hint.setOption(MySqlIndexHint.Option.GROUP_BY); + } } - public SQLUnionQuery unionRest(SQLUnionQuery union) { - if (lexer.token() == 
Token.LIMIT) { - MySqlUnionQuery mysqlUnionQuery = (MySqlUnionQuery) union; - mysqlUnionQuery.setLimit(parseLimit()); - } - return super.unionRest(union); + accept(Token.LPAREN); + if (lexer.token() == Token.PRIMARY) { + lexer.nextToken(); + hint.getIndexList().add(new SQLIdentifierExpr("PRIMARY")); + } else { + this.exprParser.names(hint.getIndexList()); } + accept(Token.RPAREN); + } - public MySqlSelectQueryBlock.Limit parseLimit() { - return ((ElasticSqlExprParser) this.exprParser).parseLimit(); - } + protected MySqlUnionQuery createSQLUnionQuery() { + return new MySqlUnionQuery(); + } - public ElasticSqlExprParser getExprParser() { - return (ElasticSqlExprParser) exprParser; + public SQLUnionQuery unionRest(SQLUnionQuery union) { + if (lexer.token() == Token.LIMIT) { + MySqlUnionQuery mysqlUnionQuery = (MySqlUnionQuery) union; + mysqlUnionQuery.setLimit(parseLimit()); } + return super.unionRest(union); + } + + public MySqlSelectQueryBlock.Limit parseLimit() { + return ((ElasticSqlExprParser) this.exprParser).parseLimit(); + } + + public ElasticSqlExprParser getExprParser() { + return (ElasticSqlExprParser) exprParser; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java index 89e9a16d1c..da08f81453 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -41,369 +40,396 @@ import org.opensearch.sql.legacy.utils.Util; /** - * - * * @author ansj */ public class FieldMaker { - private SQLFunctions sqlFunctions = new SQLFunctions(); - - public Field makeField(SQLExpr expr, String alias, String tableAlias) throws SqlParseException { - Field field = makeFieldImpl(expr, alias, tableAlias); - addGroupByForDistinctFieldsInSelect(expr, field); + private SQLFunctions sqlFunctions = new SQLFunctions(); - // why we may get null as a field??? - if (field != null) { - field.setExpression(expr); - } + public Field makeField(SQLExpr expr, String alias, String tableAlias) throws SqlParseException { + Field field = makeFieldImpl(expr, alias, tableAlias); + addGroupByForDistinctFieldsInSelect(expr, field); - return field; + // why we may get null as a field??? 
+ if (field != null) { + field.setExpression(expr); } - private Field makeFieldImpl(SQLExpr expr, String alias, String tableAlias) throws SqlParseException { - if (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr || expr instanceof SQLVariantRefExpr) { - return handleIdentifier(expr, alias, tableAlias); - } else if (expr instanceof SQLQueryExpr) { - throw new SqlParseException("unknown field name : " + expr); - } else if (expr instanceof SQLBinaryOpExpr) { - //make a SCRIPT method field; - return makeFieldImpl(makeBinaryMethodField((SQLBinaryOpExpr) expr, alias, true), alias, tableAlias); - } else if (expr instanceof SQLAllColumnExpr) { - return Field.STAR; - } else if (expr instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) expr; - - String methodName = mExpr.getMethodName(); - - if (methodName.equalsIgnoreCase("nested") || methodName.equalsIgnoreCase("reverse_nested")) { - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(mExpr)) { - return handleIdentifier(nestedType, alias, tableAlias); - } - } else if (methodName.equalsIgnoreCase("children")) { - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(mExpr)) { - return handleIdentifier(childrenType, alias, tableAlias); - } - } else if (methodName.equalsIgnoreCase("filter")) { - return makeFilterMethodField(mExpr, alias); - } - - if ((SQLFunctions.builtInFunctions.contains(methodName.toLowerCase())) && Strings.isNullOrEmpty(alias)) { - alias = mExpr.toString(); - } - return makeMethodField(methodName, mExpr.getParameters(), null, alias, tableAlias, true); - } else if (expr instanceof SQLAggregateExpr) { - SQLAggregateExpr sExpr = (SQLAggregateExpr) expr; - return makeMethodField(sExpr.getMethodName(), sExpr.getArguments(), sExpr.getOption(), - alias, tableAlias, true); - } else if (expr instanceof SQLCaseExpr) { - String scriptCode = new CaseWhenParser((SQLCaseExpr) expr, alias, tableAlias).parse(); - List methodParameters = new ArrayList<>(); - methodParameters.add(new KVValue(alias)); - methodParameters.add(new KVValue(scriptCode)); - return new MethodField("script", methodParameters, null, alias); - } else if (expr instanceof SQLCastExpr) { - SQLCastExpr castExpr = (SQLCastExpr) expr; - if (alias == null) { - alias = "cast_" + castExpr.getExpr().toString(); - } - ArrayList methodParameters = new ArrayList<>(); - methodParameters.add(((SQLCastExpr) expr).getExpr()); - return makeMethodField("CAST", methodParameters, null, alias, tableAlias, true); - } else if (expr instanceof SQLNumericLiteralExpr) { - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("assign", null); - methodInvokeExpr.addParameter(expr); - return makeMethodField(methodInvokeExpr.getMethodName(), methodInvokeExpr.getParameters(), - null, alias, tableAlias, true); - } else { - throw new SqlParseException("unknown field name : " + expr); + return field; + } + + private Field makeFieldImpl(SQLExpr expr, String alias, String tableAlias) + throws SqlParseException { + if (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr) { + return handleIdentifier(expr, alias, tableAlias); + } else if (expr instanceof SQLQueryExpr) { + throw new SqlParseException("unknown field name : " + expr); + } else if (expr instanceof SQLBinaryOpExpr) { + // make a SCRIPT method field; + return makeFieldImpl( + makeBinaryMethodField((SQLBinaryOpExpr) expr, alias, true), alias, tableAlias); + } else if (expr instanceof 
SQLAllColumnExpr) { + return Field.STAR; + } else if (expr instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) expr; + + String methodName = mExpr.getMethodName(); + + if (methodName.equalsIgnoreCase("nested") || methodName.equalsIgnoreCase("reverse_nested")) { + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(mExpr)) { + return handleIdentifier(nestedType, alias, tableAlias); } - } - - private void addGroupByForDistinctFieldsInSelect(SQLExpr expr, Field field) { - if (expr.getParent() != null && expr.getParent() instanceof SQLSelectItem - && expr.getParent().getParent() != null - && expr.getParent().getParent() instanceof SQLSelectQueryBlock) { - SQLSelectQueryBlock queryBlock = (SQLSelectQueryBlock) expr.getParent().getParent(); - if (queryBlock.getDistionOption() == SQLSetQuantifier.DISTINCT) { - SQLAggregateOption option = SQLAggregateOption.DISTINCT; - field.setAggregationOption(option); - if (queryBlock.getGroupBy() == null) { - queryBlock.setGroupBy(new SQLSelectGroupByClause()); - } - SQLSelectGroupByClause groupByClause = queryBlock.getGroupBy(); - groupByClause.addItem(expr); - queryBlock.setGroupBy(groupByClause); - } + } else if (methodName.equalsIgnoreCase("children")) { + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(mExpr)) { + return handleIdentifier(childrenType, alias, tableAlias); } + } else if (methodName.equalsIgnoreCase("filter")) { + return makeFilterMethodField(mExpr, alias); + } + + if ((SQLFunctions.builtInFunctions.contains(methodName.toLowerCase())) + && Strings.isNullOrEmpty(alias)) { + alias = mExpr.toString(); + } + return makeMethodField(methodName, mExpr.getParameters(), null, alias, tableAlias, true); + } else if (expr instanceof SQLAggregateExpr) { + SQLAggregateExpr sExpr = (SQLAggregateExpr) expr; + return makeMethodField( + sExpr.getMethodName(), sExpr.getArguments(), sExpr.getOption(), alias, tableAlias, true); + } else if (expr instanceof SQLCaseExpr) { + String scriptCode = new CaseWhenParser((SQLCaseExpr) expr, alias, tableAlias).parse(); + List methodParameters = new ArrayList<>(); + methodParameters.add(new KVValue(alias)); + methodParameters.add(new KVValue(scriptCode)); + return new MethodField("script", methodParameters, null, alias); + } else if (expr instanceof SQLCastExpr) { + SQLCastExpr castExpr = (SQLCastExpr) expr; + if (alias == null) { + alias = "cast_" + castExpr.getExpr().toString(); + } + ArrayList methodParameters = new ArrayList<>(); + methodParameters.add(((SQLCastExpr) expr).getExpr()); + return makeMethodField("CAST", methodParameters, null, alias, tableAlias, true); + } else if (expr instanceof SQLNumericLiteralExpr) { + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("assign", null); + methodInvokeExpr.addParameter(expr); + return makeMethodField( + methodInvokeExpr.getMethodName(), + methodInvokeExpr.getParameters(), + null, + alias, + tableAlias, + true); + } else { + throw new SqlParseException("unknown field name : " + expr); } - - private static Object getScriptValue(SQLExpr expr) throws SqlParseException { - return Util.getScriptValue(expr); - } - - private Field makeScriptMethodField(SQLBinaryOpExpr binaryExpr, String alias, String tableAlias) - throws SqlParseException { - List params = new ArrayList<>(); - - String scriptFieldAlias; - if (alias == null || alias.equals("")) { - scriptFieldAlias = binaryExpr.toString(); - } else { - scriptFieldAlias = alias; + } + + private void 
addGroupByForDistinctFieldsInSelect(SQLExpr expr, Field field) { + if (expr.getParent() != null + && expr.getParent() instanceof SQLSelectItem + && expr.getParent().getParent() != null + && expr.getParent().getParent() instanceof SQLSelectQueryBlock) { + SQLSelectQueryBlock queryBlock = (SQLSelectQueryBlock) expr.getParent().getParent(); + if (queryBlock.getDistionOption() == SQLSetQuantifier.DISTINCT) { + SQLAggregateOption option = SQLAggregateOption.DISTINCT; + field.setAggregationOption(option); + if (queryBlock.getGroupBy() == null) { + queryBlock.setGroupBy(new SQLSelectGroupByClause()); } - params.add(new SQLCharExpr(scriptFieldAlias)); + SQLSelectGroupByClause groupByClause = queryBlock.getGroupBy(); + groupByClause.addItem(expr); + queryBlock.setGroupBy(groupByClause); + } + } + } - Object left = getScriptValue(binaryExpr.getLeft()); - Object right = getScriptValue(binaryExpr.getRight()); - String script = String.format("%s %s %s", left, binaryExpr.getOperator().getName(), right); + private static Object getScriptValue(SQLExpr expr) throws SqlParseException { + return Util.getScriptValue(expr); + } - params.add(new SQLCharExpr(script)); + private Field makeScriptMethodField(SQLBinaryOpExpr binaryExpr, String alias, String tableAlias) + throws SqlParseException { + List params = new ArrayList<>(); - return makeMethodField("script", params, null, null, tableAlias, false); + String scriptFieldAlias; + if (alias == null || alias.equals("")) { + scriptFieldAlias = binaryExpr.toString(); + } else { + scriptFieldAlias = alias; } + params.add(new SQLCharExpr(scriptFieldAlias)); + Object left = getScriptValue(binaryExpr.getLeft()); + Object right = getScriptValue(binaryExpr.getRight()); + String script = String.format("%s %s %s", left, binaryExpr.getOperator().getName(), right); - private static Field makeFilterMethodField(SQLMethodInvokeExpr filterMethod, String alias) - throws SqlParseException { - List parameters = filterMethod.getParameters(); - int parametersSize = parameters.size(); - if (parametersSize != 1 && parametersSize != 2) { - throw new SqlParseException("filter group by field should only have one or 2 parameters" - + " filter(Expr) or filter(name,Expr)"); - } - String filterAlias = filterMethod.getMethodName(); - SQLExpr exprToCheck = null; - if (parametersSize == 1) { - exprToCheck = parameters.get(0); - filterAlias = "filter(" + exprToCheck.toString().replaceAll("\n", " ") + ")"; - } - if (parametersSize == 2) { - filterAlias = Util.extendedToString(parameters.get(0)); - exprToCheck = parameters.get(1); - } - Where where = Where.newInstance(); - new WhereParser(new SqlParser()).parseWhere(exprToCheck, where); - if (where.getWheres().size() == 0) { - throw new SqlParseException("Failed to parse filter condition"); - } - List methodParameters = new ArrayList<>(); - methodParameters.add(new KVValue("where", where)); - methodParameters.add(new KVValue("alias", filterAlias + "@FILTER")); - return new MethodField("filter", methodParameters, null, alias); - } + params.add(new SQLCharExpr(script)); + return makeMethodField("script", params, null, null, tableAlias, false); + } - private static Field handleIdentifier(NestedType nestedType, String alias, String tableAlias) { - Field field = handleIdentifier(new SQLIdentifierExpr(nestedType.field), alias, tableAlias); - field.setNested(nestedType); - field.setChildren(null); - return field; + private static Field makeFilterMethodField(SQLMethodInvokeExpr filterMethod, String alias) + throws SqlParseException { + List parameters = 
filterMethod.getParameters(); + int parametersSize = parameters.size(); + if (parametersSize != 1 && parametersSize != 2) { + throw new SqlParseException( + "filter group by field should only have one or 2 parameters" + + " filter(Expr) or filter(name,Expr)"); } - - private static Field handleIdentifier(ChildrenType childrenType, String alias, String tableAlias) { - Field field = handleIdentifier(new SQLIdentifierExpr(childrenType.field), alias, tableAlias); - field.setNested(null); - field.setChildren(childrenType); - return field; + String filterAlias = filterMethod.getMethodName(); + SQLExpr exprToCheck = null; + if (parametersSize == 1) { + exprToCheck = parameters.get(0); + filterAlias = "filter(" + exprToCheck.toString().replaceAll("\n", " ") + ")"; } - - - //binary method can nested - public SQLMethodInvokeExpr makeBinaryMethodField(SQLBinaryOpExpr expr, String alias, boolean first) - throws SqlParseException { - List params = new ArrayList<>(); - - String scriptFieldAlias; - if (first && (alias == null || alias.equals(""))) { - scriptFieldAlias = sqlFunctions.nextId("field"); - } else { - scriptFieldAlias = alias; - } - params.add(new SQLCharExpr(scriptFieldAlias)); - - switch (expr.getOperator()) { - case Add: - return convertBinaryOperatorToMethod("add", expr); - case Multiply: - return convertBinaryOperatorToMethod("multiply", expr); - - case Divide: - return convertBinaryOperatorToMethod("divide", expr); - - case Modulus: - return convertBinaryOperatorToMethod("modulus", expr); - - case Subtract: - return convertBinaryOperatorToMethod("subtract", expr); - default: - throw new SqlParseException("Unsupported operator: " + expr.getOperator().getName()); - } + if (parametersSize == 2) { + filterAlias = Util.extendedToString(parameters.get(0)); + exprToCheck = parameters.get(1); } - - private static SQLMethodInvokeExpr convertBinaryOperatorToMethod(String operator, SQLBinaryOpExpr expr) { - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(operator, null); - methodInvokeExpr.addParameter(expr.getLeft()); - methodInvokeExpr.addParameter(expr.getRight()); - methodInvokeExpr.putAttribute("source", expr); - return methodInvokeExpr; + Where where = Where.newInstance(); + new WhereParser(new SqlParser()).parseWhere(exprToCheck, where); + if (where.getWheres().size() == 0) { + throw new SqlParseException("Failed to parse filter condition"); + } + List methodParameters = new ArrayList<>(); + methodParameters.add(new KVValue("where", where)); + methodParameters.add(new KVValue("alias", filterAlias + "@FILTER")); + return new MethodField("filter", methodParameters, null, alias); + } + + private static Field handleIdentifier(NestedType nestedType, String alias, String tableAlias) { + Field field = handleIdentifier(new SQLIdentifierExpr(nestedType.field), alias, tableAlias); + field.setNested(nestedType); + field.setChildren(null); + return field; + } + + private static Field handleIdentifier( + ChildrenType childrenType, String alias, String tableAlias) { + Field field = handleIdentifier(new SQLIdentifierExpr(childrenType.field), alias, tableAlias); + field.setNested(null); + field.setChildren(childrenType); + return field; + } + + // binary method can nested + public SQLMethodInvokeExpr makeBinaryMethodField( + SQLBinaryOpExpr expr, String alias, boolean first) throws SqlParseException { + List params = new ArrayList<>(); + + String scriptFieldAlias; + if (first && (alias == null || alias.equals(""))) { + scriptFieldAlias = sqlFunctions.nextId("field"); + } else { + 
scriptFieldAlias = alias; } + params.add(new SQLCharExpr(scriptFieldAlias)); + switch (expr.getOperator()) { + case Add: + return convertBinaryOperatorToMethod("add", expr); + case Multiply: + return convertBinaryOperatorToMethod("multiply", expr); - private static Field handleIdentifier(SQLExpr expr, String alias, String tableAlias) { - String name = expr.toString().replace("`", ""); - String newFieldName = name; - Field field = null; - if (tableAlias != null) { - String aliasPrefix = tableAlias + "."; - if (name.startsWith(aliasPrefix)) { - newFieldName = name.replaceFirst(aliasPrefix, ""); - field = new Field(newFieldName, alias); - } - } + case Divide: + return convertBinaryOperatorToMethod("divide", expr); - if (tableAlias == null) { - field = new Field(newFieldName, alias); - } + case Modulus: + return convertBinaryOperatorToMethod("modulus", expr); - return field; + case Subtract: + return convertBinaryOperatorToMethod("subtract", expr); + default: + throw new SqlParseException("Unsupported operator: " + expr.getOperator().getName()); + } + } + + private static SQLMethodInvokeExpr convertBinaryOperatorToMethod( + String operator, SQLBinaryOpExpr expr) { + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(operator, null); + methodInvokeExpr.addParameter(expr.getLeft()); + methodInvokeExpr.addParameter(expr.getRight()); + methodInvokeExpr.putAttribute("source", expr); + return methodInvokeExpr; + } + + private static Field handleIdentifier(SQLExpr expr, String alias, String tableAlias) { + String name = expr.toString().replace("`", ""); + String newFieldName = name; + Field field = null; + if (tableAlias != null) { + String aliasPrefix = tableAlias + "."; + if (name.startsWith(aliasPrefix)) { + newFieldName = name.replaceFirst(aliasPrefix, ""); + field = new Field(newFieldName, alias); + } } - public MethodField makeMethodField(String name, List arguments, SQLAggregateOption option, - String alias, String tableAlias, boolean first) throws SqlParseException { - List paramers = new LinkedList<>(); - - for (SQLExpr object : arguments) { - - if (object instanceof SQLBinaryOpExpr) { - - SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) object; - - if (SQLFunctions.isFunctionTranslatedToScript(binaryOpExpr.getOperator().toString())) { - SQLMethodInvokeExpr mExpr = makeBinaryMethodField(binaryOpExpr, alias, first); - MethodField abc = makeMethodField(mExpr.getMethodName(), mExpr.getParameters(), - null, null, tableAlias, false); - paramers.add(new KVValue(abc.getParams().get(0).toString(), - new SQLCharExpr(abc.getParams().get(1).toString()))); - } else { - if (!binaryOpExpr.getOperator().getName().equals("=")) { - paramers.add(new KVValue("script", makeScriptMethodField(binaryOpExpr, null, tableAlias))); - } else { - SQLExpr right = binaryOpExpr.getRight(); - Object value = Util.expr2Object(right); - paramers.add(new KVValue(binaryOpExpr.getLeft().toString(), value)); - } - } - - } else if (object instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) object; - String methodName = mExpr.getMethodName().toLowerCase(); - if (methodName.equals("script")) { - KVValue script = new KVValue("script", makeMethodField(mExpr.getMethodName(), mExpr.getParameters(), - null, alias, tableAlias, true)); - paramers.add(script); - } else if (methodName.equals("nested") || methodName.equals("reverse_nested")) { - NestedType nestedType = new NestedType(); - - if (!nestedType.tryFillFromExpr(object)) { - throw new SqlParseException("Failed to parse nested expression: 
" + object); - } - - // Fix bug: method name of reversed_nested() was set to "nested" wrongly - paramers.add(new KVValue(methodName, nestedType)); - } else if (methodName.equals("children")) { - ChildrenType childrenType = new ChildrenType(); - - if (!childrenType.tryFillFromExpr(object)) { - throw new SqlParseException("Failed to parse children expression: " + object); - } - - paramers.add(new KVValue("children", childrenType)); - } else if (SQLFunctions.isFunctionTranslatedToScript(methodName)) { - //throw new SqlParseException("only support script/nested as inner functions"); - MethodField abc = makeMethodField(methodName, mExpr.getParameters(), null, null, tableAlias, false); - paramers.add(new KVValue(abc.getParams().get(0).toString(), - new SQLCharExpr(abc.getParams().get(1).toString()))); - } else { - throw new SqlParseException("only support script/nested/children as inner functions"); - } - } else if (object instanceof SQLCaseExpr) { - String scriptCode = new CaseWhenParser((SQLCaseExpr) object, alias, tableAlias).parse(); - paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); - } else if (object instanceof SQLCastExpr) { - String castName = sqlFunctions.nextId("cast"); - List methodParameters = new ArrayList<>(); - methodParameters.add(new KVValue(((SQLCastExpr) object).getExpr().toString())); - String castType = ((SQLCastExpr) object).getDataType().getName(); - String scriptCode = sqlFunctions.getCastScriptStatement(castName, castType, methodParameters); - - // Parameter "first" indicates if return statement is required. Take CAST statement nested in - // aggregate function SUM(CAST...) for example, return statement is required in this case. - // Otherwise DSL with metric aggregation always returns 0 as result. And this works also because - // the caller makeFieldImpl(SQLExpr("SUM...")) does pass first=true to here. 
- if (first) { - scriptCode += "; return " + castName; - } - methodParameters.add(new KVValue(scriptCode)); - paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); - } else if (object instanceof SQLAggregateExpr) { - SQLObject parent = object.getParent(); - SQLExpr source = (SQLExpr) parent.getAttribute("source"); - - if (parent instanceof SQLMethodInvokeExpr && source == null) { - throw new SqlFeatureNotImplementedException( - "Function calls of form '" - + ((SQLMethodInvokeExpr) parent).getMethodName() - + "(" - + ((SQLAggregateExpr) object).getMethodName() - + "(...))' are not implemented yet"); - } - - throw new SqlFeatureNotImplementedException( - "The complex aggregate expressions are not implemented yet: " + source); - } else { - paramers.add(new KVValue(Util.removeTableAilasFromField(object, tableAlias))); - } + if (tableAlias == null) { + field = new Field(newFieldName, alias); + } + return field; + } + + public MethodField makeMethodField( + String name, + List arguments, + SQLAggregateOption option, + String alias, + String tableAlias, + boolean first) + throws SqlParseException { + List paramers = new LinkedList<>(); + + for (SQLExpr object : arguments) { + + if (object instanceof SQLBinaryOpExpr) { + + SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) object; + + if (SQLFunctions.isFunctionTranslatedToScript(binaryOpExpr.getOperator().toString())) { + SQLMethodInvokeExpr mExpr = makeBinaryMethodField(binaryOpExpr, alias, first); + MethodField abc = + makeMethodField( + mExpr.getMethodName(), mExpr.getParameters(), null, null, tableAlias, false); + paramers.add( + new KVValue( + abc.getParams().get(0).toString(), + new SQLCharExpr(abc.getParams().get(1).toString()))); + } else { + if (!binaryOpExpr.getOperator().getName().equals("=")) { + paramers.add( + new KVValue("script", makeScriptMethodField(binaryOpExpr, null, tableAlias))); + } else { + SQLExpr right = binaryOpExpr.getRight(); + Object value = Util.expr2Object(right); + paramers.add(new KVValue(binaryOpExpr.getLeft().toString(), value)); + } } - //just check we can find the function - boolean builtInScriptFunction = SQLFunctions.isFunctionTranslatedToScript(name); - if (builtInScriptFunction) { - if (alias == null && first) { - alias = sqlFunctions.nextId(name); - } - //should check if field and first . - Tuple newFunctions = sqlFunctions.function(name.toLowerCase(), paramers, - paramers.isEmpty() ? 
null : paramers.get(0).key, first); - paramers.clear(); - if (!first) { - //variance - paramers.add(new KVValue(newFunctions.v1())); - } else { - paramers.add(new KVValue(alias)); - } - - paramers.add(new KVValue(newFunctions.v2())); + } else if (object instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) object; + String methodName = mExpr.getMethodName().toLowerCase(); + if (methodName.equals("script")) { + KVValue script = + new KVValue( + "script", + makeMethodField( + mExpr.getMethodName(), mExpr.getParameters(), null, alias, tableAlias, true)); + paramers.add(script); + } else if (methodName.equals("nested") || methodName.equals("reverse_nested")) { + NestedType nestedType = new NestedType(); + + if (!nestedType.tryFillFromExpr(object)) { + throw new SqlParseException("Failed to parse nested expression: " + object); + } + + // Fix bug: method name of reversed_nested() was set to "nested" wrongly + paramers.add(new KVValue(methodName, nestedType)); + } else if (methodName.equals("children")) { + ChildrenType childrenType = new ChildrenType(); + + if (!childrenType.tryFillFromExpr(object)) { + throw new SqlParseException("Failed to parse children expression: " + object); + } + + paramers.add(new KVValue("children", childrenType)); + } else if (SQLFunctions.isFunctionTranslatedToScript(methodName)) { + // throw new SqlParseException("only support script/nested as inner functions"); + MethodField abc = + makeMethodField(methodName, mExpr.getParameters(), null, null, tableAlias, false); + paramers.add( + new KVValue( + abc.getParams().get(0).toString(), + new SQLCharExpr(abc.getParams().get(1).toString()))); + } else { + throw new SqlParseException("only support script/nested/children as inner functions"); } + } else if (object instanceof SQLCaseExpr) { + String scriptCode = new CaseWhenParser((SQLCaseExpr) object, alias, tableAlias).parse(); + paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); + } else if (object instanceof SQLCastExpr) { + String castName = sqlFunctions.nextId("cast"); + List methodParameters = new ArrayList<>(); + methodParameters.add(new KVValue(((SQLCastExpr) object).getExpr().toString())); + String castType = ((SQLCastExpr) object).getDataType().getName(); + String scriptCode = + sqlFunctions.getCastScriptStatement(castName, castType, methodParameters); + + // Parameter "first" indicates if return statement is required. Take CAST statement nested + // in + // aggregate function SUM(CAST...) for example, return statement is required in this case. + // Otherwise DSL with metric aggregation always returns 0 as result. And this works also + // because + // the caller makeFieldImpl(SQLExpr("SUM...")) does pass first=true to here. 
if (first) { - List tempParamers = new LinkedList<>(); - for (KVValue temp : paramers) { - if (temp.value instanceof SQLExpr) { - tempParamers.add(new KVValue(temp.key, Util.expr2Object((SQLExpr) temp.value))); - } else { - tempParamers.add(new KVValue(temp.key, temp.value)); - } - } - paramers.clear(); - paramers.addAll(tempParamers); + scriptCode += "; return " + castName; + } + methodParameters.add(new KVValue(scriptCode)); + paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); + } else if (object instanceof SQLAggregateExpr) { + SQLObject parent = object.getParent(); + SQLExpr source = (SQLExpr) parent.getAttribute("source"); + + if (parent instanceof SQLMethodInvokeExpr && source == null) { + throw new SqlFeatureNotImplementedException( + "Function calls of form '" + + ((SQLMethodInvokeExpr) parent).getMethodName() + + "(" + + ((SQLAggregateExpr) object).getMethodName() + + "(...))' are not implemented yet"); } - if (builtInScriptFunction) { - return new ScriptMethodField(name, paramers, option, alias); + throw new SqlFeatureNotImplementedException( + "The complex aggregate expressions are not implemented yet: " + source); + } else { + paramers.add(new KVValue(Util.removeTableAilasFromField(object, tableAlias))); + } + } + + // just check we can find the function + boolean builtInScriptFunction = SQLFunctions.isFunctionTranslatedToScript(name); + if (builtInScriptFunction) { + if (alias == null && first) { + alias = sqlFunctions.nextId(name); + } + // should check if field and first . + Tuple newFunctions = + sqlFunctions.function( + name.toLowerCase(), paramers, paramers.isEmpty() ? null : paramers.get(0).key, first); + paramers.clear(); + if (!first) { + // variance + paramers.add(new KVValue(newFunctions.v1())); + } else { + paramers.add(new KVValue(alias)); + } + + paramers.add(new KVValue(newFunctions.v2())); + } + if (first) { + List tempParamers = new LinkedList<>(); + for (KVValue temp : paramers) { + if (temp.value instanceof SQLExpr) { + tempParamers.add(new KVValue(temp.key, Util.expr2Object((SQLExpr) temp.value))); } else { - return new MethodField(name, paramers, option, alias); + tempParamers.add(new KVValue(temp.key, temp.value)); } + } + paramers.clear(); + paramers.addAll(tempParamers); + } + + if (builtInScriptFunction) { + return new ScriptMethodField(name, paramers, option, alias); + } else { + return new MethodField(name, paramers, option, alias); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java index 307d87f6e8..e0d933a405 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -21,93 +20,89 @@ import org.opensearch.sql.legacy.domain.Where; import org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Parse expression in the Having clause. - */ +/** Parse expression in the Having clause. 
*/ public class HavingParser { - private final WhereParser whereParser; - private final List havingFields; - private final HavingConditionRewriter havingConditionRewriter; + private final WhereParser whereParser; + private final List havingFields; + private final HavingConditionRewriter havingConditionRewriter; - public HavingParser(WhereParser whereParser) { - this.whereParser = whereParser; - this.havingFields = new ArrayList<>(); - this.havingConditionRewriter = new HavingConditionRewriter(); - } + public HavingParser(WhereParser whereParser) { + this.whereParser = whereParser; + this.havingFields = new ArrayList<>(); + this.havingConditionRewriter = new HavingConditionRewriter(); + } - public void parseWhere(SQLExpr expr, Where where) throws SqlParseException { - expr.accept(havingConditionRewriter); - whereParser.parseWhere(expr, where); - } + public void parseWhere(SQLExpr expr, Where where) throws SqlParseException { + expr.accept(havingConditionRewriter); + whereParser.parseWhere(expr, where); + } - public List getHavingFields() { - return havingFields; - } - - private class HavingConditionRewriter extends MySqlASTVisitorAdapter { - private int aliasSuffix = 0; + public List getHavingFields() { + return havingFields; + } - @Override - public boolean visit(SQLAggregateExpr expr) { - SQLIdentifierExpr translatedExpr = translateAggExpr(expr); - SQLObject parent = expr.getParent(); - // Rewrite {@link SQLAggregateExpr} in {@link SQLBinaryOpExpr}, e.g. HAVING AVG(age) > 30) - if (parent instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr parentOpExpr = (SQLBinaryOpExpr) parent; - if (parentOpExpr.getLeft() == expr) { - parentOpExpr.setLeft(translatedExpr); - } else { - parentOpExpr.setRight(translatedExpr); - } - // Rewrite {@link SQLAggregateExpr} in {@link SQLNotExpr}, e.g. HAVING NOT (AVG(a) > 30) - } else if (parent instanceof SQLNotExpr) { - SQLNotExpr parentNotExpr = (SQLNotExpr) parent; - parentNotExpr.setExpr(translatedExpr); - // Rewrite {@link SQLAggregateExpr} in {@link SQLInListExpr}, e.g. HAVING AVG(a) IN (30, 40, 50) - } else if (parent instanceof SQLInListExpr) { - SQLInListExpr parentInListExpr = (SQLInListExpr) parent; - parentInListExpr.setExpr(translatedExpr); - // Rewrite {@link SQLAggregateExpr} in {@link SQLBetweenExpr}, e.g. HAVING AVG(a) BETWEEN 30, 40 - } else if (parent instanceof SQLBetweenExpr) { - SQLBetweenExpr parentBetweenExpr = (SQLBetweenExpr) parent; - parentBetweenExpr.setTestExpr(translatedExpr); - } else { - throw new IllegalStateException("Unsupported aggregation function in having clause " - + parent.getClass()); - } + private class HavingConditionRewriter extends MySqlASTVisitorAdapter { + private int aliasSuffix = 0; - return true; + @Override + public boolean visit(SQLAggregateExpr expr) { + SQLIdentifierExpr translatedExpr = translateAggExpr(expr); + SQLObject parent = expr.getParent(); + // Rewrite {@link SQLAggregateExpr} in {@link SQLBinaryOpExpr}, e.g. HAVING AVG(age) > 30) + if (parent instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr parentOpExpr = (SQLBinaryOpExpr) parent; + if (parentOpExpr.getLeft() == expr) { + parentOpExpr.setLeft(translatedExpr); + } else { + parentOpExpr.setRight(translatedExpr); } + // Rewrite {@link SQLAggregateExpr} in {@link SQLNotExpr}, e.g. HAVING NOT (AVG(a) > 30) + } else if (parent instanceof SQLNotExpr) { + SQLNotExpr parentNotExpr = (SQLNotExpr) parent; + parentNotExpr.setExpr(translatedExpr); + // Rewrite {@link SQLAggregateExpr} in {@link SQLInListExpr}, e.g. 
HAVING AVG(a) IN (30, 40, + // 50) + } else if (parent instanceof SQLInListExpr) { + SQLInListExpr parentInListExpr = (SQLInListExpr) parent; + parentInListExpr.setExpr(translatedExpr); + // Rewrite {@link SQLAggregateExpr} in {@link SQLBetweenExpr}, e.g. HAVING AVG(a) BETWEEN + // 30, 40 + } else if (parent instanceof SQLBetweenExpr) { + SQLBetweenExpr parentBetweenExpr = (SQLBetweenExpr) parent; + parentBetweenExpr.setTestExpr(translatedExpr); + } else { + throw new IllegalStateException( + "Unsupported aggregation function in having clause " + parent.getClass()); + } - /** - * If the expr is {@link SQLAggregateExpr} - * 1) rewrite as {@link SQLIdentifierExpr} - * 2) add the {@link SQLIdentifierExpr} to the havingFields - *
- * For example, the COUNT(age) is the {@link SQLAggregateExpr} in expression COUNT(age) > 1 - * 1) parsing COUNT(age) as {@link SQLIdentifierExpr} count_1 - * 2) return {@link SQLIdentifierExpr} count_1 to the havingFields - */ - private SQLIdentifierExpr translateAggExpr(SQLAggregateExpr expr) { - String methodAlias = methodAlias(expr.getMethodName()); - SQLIdentifierExpr sqlExpr = new SQLIdentifierExpr(methodAlias); - try { - havingFields.add(new FieldMaker().makeField( - expr, - methodAlias, - null)); - return sqlExpr; - } catch (SqlParseException e) { - throw new IllegalStateException(e); - } - } + return true; + } - private String methodAlias(String methodName) { - return String.format("%s_%d", methodName.toLowerCase(), nextAlias()); - } + /** + * If the expr is {@link SQLAggregateExpr} 1) rewrite as {@link SQLIdentifierExpr} 2) add the + * {@link SQLIdentifierExpr} to the havingFields + * + *
<p>For example, the COUNT(age) is the {@link SQLAggregateExpr} in expression COUNT(age) > 1 + * 1) parsing COUNT(age) as {@link SQLIdentifierExpr} count_1 2) return {@link + * SQLIdentifierExpr} count_1 to the havingFields + */ + private SQLIdentifierExpr translateAggExpr(SQLAggregateExpr expr) { + String methodAlias = methodAlias(expr.getMethodName()); + SQLIdentifierExpr sqlExpr = new SQLIdentifierExpr(methodAlias); + try { + havingFields.add(new FieldMaker().makeField(expr, methodAlias, null)); + return sqlExpr; + } catch (SqlParseException e) { + throw new IllegalStateException(e); + } + } - private String methodAlias(String methodName) { - return String.format("%s_%d", methodName.toLowerCase(), nextAlias()); - } + private String methodAlias(String methodName) { + return String.format("%s_%d", methodName.toLowerCase(), nextAlias()); + } + + private Integer nextAlias() { + return aliasSuffix++; - private Integer nextAlias() { - return aliasSuffix++; - } + } } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java index 3ab8c11ee0..c8b44e1bbb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java @@ -3,42 +3,39 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.List; import java.util.Map; import org.opensearch.sql.legacy.domain.Field; -/** - * Created by Eliran on 22/8/2015. - */ +/** Created by Eliran on 22/8/2015. */ public class HashJoinElasticRequestBuilder extends JoinRequestBuilder { - private List<List<Map.Entry<Field, Field>>> t1ToT2FieldsComparison; - private boolean useTermFiltersOptimization; + private List<List<Map.Entry<Field, Field>>> t1ToT2FieldsComparison; + private boolean useTermFiltersOptimization; - public HashJoinElasticRequestBuilder() { - } + public HashJoinElasticRequestBuilder() {} - @Override - public String explain() { - return "HashJoin " + super.explain(); - } + @Override + public String explain() { + return "HashJoin " + super.explain(); + } - public List<List<Map.Entry<Field, Field>>> getT1ToT2FieldsComparison() { - return t1ToT2FieldsComparison; - } + public List<List<Map.Entry<Field, Field>>> getT1ToT2FieldsComparison() { + return t1ToT2FieldsComparison; + } - public void setT1ToT2FieldsComparison(List<List<Map.Entry<Field, Field>>> t1ToT2FieldsComparison) { - this.t1ToT2FieldsComparison = t1ToT2FieldsComparison; - } + public void setT1ToT2FieldsComparison( + List<List<Map.Entry<Field, Field>>> t1ToT2FieldsComparison) { + this.t1ToT2FieldsComparison = t1ToT2FieldsComparison; + } - public boolean isUseTermFiltersOptimization() { - return useTermFiltersOptimization; - } + public boolean isUseTermFiltersOptimization() { + return useTermFiltersOptimization; + } - public void setUseTermFiltersOptimization(boolean useTermFiltersOptimization) { - this.useTermFiltersOptimization = useTermFiltersOptimization; - } + public void setUseTermFiltersOptimization(boolean useTermFiltersOptimization) { + this.useTermFiltersOptimization = useTermFiltersOptimization; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java index 316d17a275..82ebd1b225 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource;
@@ -18,95 +17,99 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. */ public class JoinRequestBuilder implements SqlElasticRequestBuilder { - private MultiSearchRequest multi; - private TableInJoinRequestBuilder firstTable; - private TableInJoinRequestBuilder secondTable; - private SQLJoinTableSource.JoinType joinType; - private int totalLimit; - - public JoinRequestBuilder() { - firstTable = new TableInJoinRequestBuilder(); - secondTable = new TableInJoinRequestBuilder(); - } - - - @Override - public ActionRequest request() { - if (multi == null) { - buildMulti(); - } - return multi; - - } - - private void buildMulti() { - multi = new MultiSearchRequest(); - multi.add(firstTable.getRequestBuilder()); - multi.add(secondTable.getRequestBuilder()); - } - - @Override - public String explain() { - try { - XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); - firstTable.getRequestBuilder().request().source().toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); - - XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); - secondTable.getRequestBuilder().request().source().toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); - return String.format(" first query:\n%s\n second query:\n%s", - BytesReference.bytes(firstBuilder).utf8ToString(), - BytesReference.bytes(secondBuilder).utf8ToString()); - } catch (IOException e) { - e.printStackTrace(); - } - return null; - } - - @Override - public ActionResponse get() { - return null; - } - - @Override - public ActionRequestBuilder getBuilder() { - return this.firstTable.getRequestBuilder(); + private MultiSearchRequest multi; + private TableInJoinRequestBuilder firstTable; + private TableInJoinRequestBuilder secondTable; + private SQLJoinTableSource.JoinType joinType; + private int totalLimit; + + public JoinRequestBuilder() { + firstTable = new TableInJoinRequestBuilder(); + secondTable = new TableInJoinRequestBuilder(); + } + + @Override + public ActionRequest request() { + if (multi == null) { + buildMulti(); } - - public MultiSearchRequest getMulti() { - return multi; + return multi; + } + + private void buildMulti() { + multi = new MultiSearchRequest(); + multi.add(firstTable.getRequestBuilder()); + multi.add(secondTable.getRequestBuilder()); + } + + @Override + public String explain() { + try { + XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); + firstTable + .getRequestBuilder() + .request() + .source() + .toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); + + XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); + secondTable + .getRequestBuilder() + .request() + .source() + .toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); + return String.format( + " first query:\n%s\n second query:\n%s", + BytesReference.bytes(firstBuilder).utf8ToString(), + BytesReference.bytes(secondBuilder).utf8ToString()); + } catch (IOException e) { + e.printStackTrace(); } - - public void setMulti(MultiSearchRequest multi) { - this.multi = multi; - } - - public SQLJoinTableSource.JoinType getJoinType() { - return joinType; - } - - public void setJoinType(SQLJoinTableSource.JoinType joinType) { - this.joinType = joinType; - } - - public TableInJoinRequestBuilder getFirstTable() { - return firstTable; - } - - public TableInJoinRequestBuilder getSecondTable() { - return secondTable; - } - - public int getTotalLimit() { - return 
totalLimit; - } - - public void setTotalLimit(int totalLimit) { - this.totalLimit = totalLimit; - } - + return null; + } + + @Override + public ActionResponse get() { + return null; + } + + @Override + public ActionRequestBuilder getBuilder() { + return this.firstTable.getRequestBuilder(); + } + + public MultiSearchRequest getMulti() { + return multi; + } + + public void setMulti(MultiSearchRequest multi) { + this.multi = multi; + } + + public SQLJoinTableSource.JoinType getJoinType() { + return joinType; + } + + public void setJoinType(SQLJoinTableSource.JoinType joinType) { + this.joinType = joinType; + } + + public TableInJoinRequestBuilder getFirstTable() { + return firstTable; + } + + public TableInJoinRequestBuilder getSecondTable() { + return secondTable; + } + + public int getTotalLimit() { + return totalLimit; + } + + public void setTotalLimit(int totalLimit) { + this.totalLimit = totalLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java index 08018d94de..302af70ea8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.maker; import static org.opensearch.sql.legacy.parser.WhereParser.getConditionForMethod; @@ -66,460 +65,472 @@ public abstract class Maker { - /** - * UTC. - */ - private static final ZoneId UTC = ZoneId.of("UTC"); - - public static final Object NONE = new Object(); - - public static final Set queryFunctions = Sets.newHashSet( - "query", - "matchquery", "match_query", // match - "multimatchquery", "multi_match", "multimatch", // multi-match - "score", "scorequery", "score_query", // score - "wildcardquery", "wildcard_query", // wildcard - "matchphrasequery", "match_phrase", "matchphrase" // match-phrase - ); - - private static final Set NOT_OPERATOR_SET = ImmutableSet.of( - Condition.OPERATOR.N, Condition.OPERATOR.NIN, Condition.OPERATOR.ISN, Condition.OPERATOR.NBETWEEN, - Condition.OPERATOR.NLIKE, Condition.OPERATOR.NIN_TERMS, Condition.OPERATOR.NTERM, - Condition.OPERATOR.NOT_EXISTS_NESTED_COMPLEX, Condition.OPERATOR.NREGEXP - ); - - protected Maker(Boolean isQuery) { - + /** UTC. 
*/ + private static final ZoneId UTC = ZoneId.of("UTC"); + + public static final Object NONE = new Object(); + + public static final Set queryFunctions = + Sets.newHashSet( + "query", + "matchquery", + "match_query", // match + "multimatchquery", + "multi_match", + "multimatch", // multi-match + "score", + "scorequery", + "score_query", // score + "wildcardquery", + "wildcard_query", // wildcard + "matchphrasequery", + "match_phrase", + "matchphrase" // match-phrase + ); + + private static final Set NOT_OPERATOR_SET = + ImmutableSet.of( + Condition.OPERATOR.N, + Condition.OPERATOR.NIN, + Condition.OPERATOR.ISN, + Condition.OPERATOR.NBETWEEN, + Condition.OPERATOR.NLIKE, + Condition.OPERATOR.NIN_TERMS, + Condition.OPERATOR.NTERM, + Condition.OPERATOR.NOT_EXISTS_NESTED_COMPLEX, + Condition.OPERATOR.NREGEXP); + + protected Maker(Boolean isQuery) {} + + /** + * @param cond + * @return + * @throws SqlParseException + */ + protected ToXContent make(Condition cond) throws SqlParseException { + + String name = cond.getName(); + Object value = cond.getValue(); + + ToXContent toXContent = null; + + if (value instanceof SQLMethodInvokeExpr) { + toXContent = make(cond, name, (SQLMethodInvokeExpr) value); + } else if (value instanceof SubQueryExpression) { + toXContent = make(cond, name, ((SubQueryExpression) value).getValues()); + } else { + if (cond.getValue() == NONE) { + toXContent = new MatchNoneQueryBuilder(); + } else { + toXContent = make(cond, name, value); + } } - /** - * - * - * @param cond - * @return - * @throws SqlParseException - */ - protected ToXContent make(Condition cond) throws SqlParseException { - - String name = cond.getName(); - Object value = cond.getValue(); - - ToXContent toXContent = null; - - if (value instanceof SQLMethodInvokeExpr) { - toXContent = make(cond, name, (SQLMethodInvokeExpr) value); - } else if (value instanceof SubQueryExpression) { - toXContent = make(cond, name, ((SubQueryExpression) value).getValues()); - } else { - if (cond.getValue() == NONE) { - toXContent = new MatchNoneQueryBuilder(); - } else { - toXContent = make(cond, name, value); - } + return toXContent; + } + + private ToXContent make(Condition cond, String name, SQLMethodInvokeExpr value) + throws SqlParseException { + ToXContent bqb = null; + Paramer paramer = null; + switch (value.getMethodName().toLowerCase()) { + case "query": + paramer = Paramer.parseParamer(value); + QueryStringQueryBuilder queryString = QueryBuilders.queryStringQuery(paramer.value); + bqb = Paramer.fullParamer(queryString, paramer); + bqb = applyNot(cond.getOPERATOR(), bqb); + break; + case "matchquery": + case "match_query": + paramer = Paramer.parseParamer(value); + MatchQueryBuilder matchQuery = QueryBuilders.matchQuery(name, paramer.value); + bqb = Paramer.fullParamer(matchQuery, paramer); + bqb = applyNot(cond.getOPERATOR(), bqb); + break; + case "score": + case "scorequery": + case "score_query": + Float boost = Float.parseFloat(value.getParameters().get(1).toString()); + Condition subCond = getConditionForMethod(value.getParameters().get(0), cond.getConn()); + QueryBuilder subQuery = (QueryBuilder) make(subCond); + if (subCond.isNested()) { + subQuery = QueryBuilders.nestedQuery(subCond.getNestedPath(), subQuery, ScoreMode.None); } - - return toXContent; + bqb = QueryBuilders.constantScoreQuery(subQuery).boost(boost); + break; + case "wildcardquery": + case "wildcard_query": + paramer = Paramer.parseParamer(value); + WildcardQueryBuilder wildcardQuery = QueryBuilders.wildcardQuery(name, paramer.value); + bqb = 
Paramer.fullParamer(wildcardQuery, paramer); + break; + + case "matchphrasequery": + case "match_phrase": + case "matchphrase": + paramer = Paramer.parseParamer(value); + MatchPhraseQueryBuilder matchPhraseQuery = + QueryBuilders.matchPhraseQuery(name, paramer.value); + bqb = Paramer.fullParamer(matchPhraseQuery, paramer); + break; + + case "multimatchquery": + case "multi_match": + case "multimatch": + paramer = Paramer.parseParamer(value); + MultiMatchQueryBuilder multiMatchQuery = + QueryBuilders.multiMatchQuery(paramer.value).fields(paramer.fieldsBoosts); + bqb = Paramer.fullParamer(multiMatchQuery, paramer); + break; + default: + throw new SqlParseException( + "The following query method is not supported: " + value.getMethodName()); } - private ToXContent make(Condition cond, String name, SQLMethodInvokeExpr value) throws SqlParseException { - ToXContent bqb = null; - Paramer paramer = null; - switch (value.getMethodName().toLowerCase()) { - case "query": - paramer = Paramer.parseParamer(value); - QueryStringQueryBuilder queryString = QueryBuilders.queryStringQuery(paramer.value); - bqb = Paramer.fullParamer(queryString, paramer); - bqb = applyNot(cond.getOPERATOR(), bqb); - break; - case "matchquery": - case "match_query": - paramer = Paramer.parseParamer(value); - MatchQueryBuilder matchQuery = QueryBuilders.matchQuery(name, paramer.value); - bqb = Paramer.fullParamer(matchQuery, paramer); - bqb = applyNot(cond.getOPERATOR(), bqb); - break; - case "score": - case "scorequery": - case "score_query": - Float boost = Float.parseFloat(value.getParameters().get(1).toString()); - Condition subCond = getConditionForMethod(value.getParameters().get(0), cond.getConn()); - QueryBuilder subQuery = (QueryBuilder) make(subCond); - if (subCond.isNested()) { - subQuery = QueryBuilders.nestedQuery(subCond.getNestedPath(), subQuery, ScoreMode.None); - } - bqb = QueryBuilders.constantScoreQuery(subQuery).boost(boost); - break; - case "wildcardquery": - case "wildcard_query": - paramer = Paramer.parseParamer(value); - WildcardQueryBuilder wildcardQuery = QueryBuilders.wildcardQuery(name, paramer.value); - bqb = Paramer.fullParamer(wildcardQuery, paramer); - break; - - case "matchphrasequery": - case "match_phrase": - case "matchphrase": - paramer = Paramer.parseParamer(value); - MatchPhraseQueryBuilder matchPhraseQuery = QueryBuilders.matchPhraseQuery(name, paramer.value); - bqb = Paramer.fullParamer(matchPhraseQuery, paramer); - break; - - case "multimatchquery": - case "multi_match": - case "multimatch": - paramer = Paramer.parseParamer(value); - MultiMatchQueryBuilder multiMatchQuery = QueryBuilders.multiMatchQuery(paramer.value) - .fields(paramer.fieldsBoosts); - bqb = Paramer.fullParamer(multiMatchQuery, paramer); - break; - default: - throw new SqlParseException("The following query method is not supported: " + value.getMethodName()); + return bqb; + } + + private ToXContent make(Condition cond, String name, Object value) throws SqlParseException { + ToXContent toXContent = null; + switch (cond.getOPERATOR()) { + case ISN: + case IS: + case N: + case EQ: + if (value == null || value instanceof SQLIdentifierExpr) { + // todo: change to exists + if (value == null || ((SQLIdentifierExpr) value).getName().equalsIgnoreCase("missing")) { + toXContent = QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(name)); + } else { + throw new SqlParseException( + String.format( + "Cannot recoginze Sql identifer %s", ((SQLIdentifierExpr) value).getName())); + } + break; + } else { + toXContent = 
QueryBuilders.termQuery(name, value); + break; } - - return bqb; - } - - private ToXContent make(Condition cond, String name, Object value) throws SqlParseException { - ToXContent toXContent = null; - switch (cond.getOPERATOR()) { - case ISN: - case IS: - case N: - case EQ: - if (value == null || value instanceof SQLIdentifierExpr) { - //todo: change to exists - if (value == null || ((SQLIdentifierExpr) value).getName().equalsIgnoreCase("missing")) { - toXContent = QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(name)); - } else { - throw new SqlParseException(String.format("Cannot recoginze Sql identifer %s", - ((SQLIdentifierExpr) value).getName())); - } - break; - } else { - toXContent = QueryBuilders.termQuery(name, value); - break; - } - case LIKE: - case NLIKE: - String queryStr = ((String) value); - queryStr = queryStr.replace('%', '*').replace('_', '?'); - queryStr = queryStr.replace("&PERCENT", "%").replace("&UNDERSCORE", "_"); - toXContent = QueryBuilders.wildcardQuery(name, queryStr); - break; - case REGEXP: - case NREGEXP: - Object[] values = (Object[]) value; - RegexpQueryBuilder regexpQuery = QueryBuilders.regexpQuery(name, values[0].toString()); - if (1 < values.length) { - String[] flags = values[1].toString().split("\\|"); - RegexpFlag[] regexpFlags = new RegexpFlag[flags.length]; - for (int i = 0; i < flags.length; ++i) { - regexpFlags[i] = RegexpFlag.valueOf(flags[i]); - } - regexpQuery.flags(regexpFlags); - } - if (2 < values.length) { - regexpQuery.maxDeterminizedStates(Integer.parseInt(values[2].toString())); - } - toXContent = regexpQuery; - break; - case GT: - toXContent = QueryBuilders.rangeQuery(name).gt(value); - break; - case GTE: - toXContent = QueryBuilders.rangeQuery(name).gte(value); - break; - case LT: - toXContent = QueryBuilders.rangeQuery(name).lt(value); - break; - case LTE: - toXContent = QueryBuilders.rangeQuery(name).lte(value); - break; - case NIN: - case IN: - //todo: value is subquery? 
here or before - values = (Object[]) value; - TermQueryBuilder[] termQueries = new TermQueryBuilder[values.length]; - for (int i = 0; i < values.length; i++) { - termQueries[i] = QueryBuilders.termQuery(name, values[i]); - } - - BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); - for (TermQueryBuilder termQuery : termQueries) { - boolQuery.should(termQuery); - } - toXContent = boolQuery; - break; - case BETWEEN: - case NBETWEEN: - toXContent = QueryBuilders.rangeQuery(name).gte(((Object[]) value)[0]).lte(((Object[]) value)[1]); - break; - case GEO_INTERSECTS: - String wkt = cond.getValue().toString(); - try { - ShapeBuilder shapeBuilder = getShapeBuilderFromString(wkt); - toXContent = QueryBuilders.geoShapeQuery(cond.getName(), shapeBuilder); - } catch (IOException e) { - e.printStackTrace(); - throw new SqlParseException(StringUtils.format("Failed to create shapeBuilder from [%s]", wkt)); - } - break; - case GEO_BOUNDING_BOX: - BoundingBoxFilterParams boxFilterParams = (BoundingBoxFilterParams) cond.getValue(); - Point topLeft = boxFilterParams.getTopLeft(); - Point bottomRight = boxFilterParams.getBottomRight(); - toXContent = QueryBuilders.geoBoundingBoxQuery(cond.getName()).setCorners(topLeft.getLat(), - topLeft.getLon(), bottomRight.getLat(), bottomRight.getLon()); - break; - case GEO_DISTANCE: - DistanceFilterParams distanceFilterParams = (DistanceFilterParams) cond.getValue(); - Point fromPoint = distanceFilterParams.getFrom(); - String distance = trimApostrophes(distanceFilterParams.getDistance()); - toXContent = QueryBuilders.geoDistanceQuery(cond.getName()).distance(distance) - .point(fromPoint.getLat(), fromPoint.getLon()); - break; - case GEO_POLYGON: - PolygonFilterParams polygonFilterParams = (PolygonFilterParams) cond.getValue(); - ArrayList geoPoints = new ArrayList(); - for (Point p : polygonFilterParams.getPolygon()) { - geoPoints.add(new GeoPoint(p.getLat(), p.getLon())); - } - GeoPolygonQueryBuilder polygonFilterBuilder = QueryBuilders.geoPolygonQuery(cond.getName(), geoPoints); - toXContent = polygonFilterBuilder; - break; - case NIN_TERMS: - case IN_TERMS: - Object[] termValues = (Object[]) value; - if (termValues.length == 1 && termValues[0] instanceof SubQueryExpression) { - termValues = ((SubQueryExpression) termValues[0]).getValues(); - } - Object[] termValuesObjects = new Object[termValues.length]; - for (int i = 0; i < termValues.length; i++) { - termValuesObjects[i] = parseTermValue(termValues[i]); - } - toXContent = QueryBuilders.termsQuery(name, termValuesObjects); - break; - case NTERM: - case TERM: - Object term = ((Object[]) value)[0]; - toXContent = QueryBuilders.termQuery(name, parseTermValue(term)); - break; - case IDS_QUERY: - Object[] idsParameters = (Object[]) value; - String[] ids; - if (idsParameters.length == 2 && idsParameters[1] instanceof SubQueryExpression) { - Object[] idsFromSubQuery = ((SubQueryExpression) idsParameters[1]).getValues(); - ids = arrayOfObjectsToStringArray(idsFromSubQuery, 0, idsFromSubQuery.length - 1); - } else { - ids = arrayOfObjectsToStringArray(idsParameters, 1, idsParameters.length - 1); - } - toXContent = QueryBuilders.idsQuery().addIds(ids); - break; - case NESTED_COMPLEX: - case NOT_EXISTS_NESTED_COMPLEX: - if (value == null || !(value instanceof Where)) { - throw new SqlParseException("unsupported nested condition"); - } - - Where whereNested = (Where) value; - BoolQueryBuilder nestedFilter = QueryMaker.explain(whereNested); - - toXContent = QueryBuilders.nestedQuery(name, nestedFilter, ScoreMode.None); - 
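
A note on the LIKE/NLIKE branch in this hunk: SQL wildcard semantics are preserved by rewriting the pattern before it reaches the wildcard query ('%' becomes '*', '_' becomes '?', and the escaped '&PERCENT'/'&UNDERSCORE' placeholders are restored afterwards). A minimal sketch of that translation, outside this change and with an invented field name and pattern:

    import org.opensearch.index.query.QueryBuilder;
    import org.opensearch.index.query.QueryBuilders;

    public class LikeToWildcardSketch {
      public static void main(String[] args) {
        // SQL predicate: lastname LIKE 'jo_n%'  (any single character, then any suffix)
        String likePattern = "jo_n%";
        // Same replacements the LIKE/NLIKE case applies before building the query
        String wildcard = likePattern.replace('%', '*').replace('_', '?')
            .replace("&PERCENT", "%").replace("&UNDERSCORE", "_");
        QueryBuilder query = QueryBuilders.wildcardQuery("lastname", wildcard); // pattern "jo?n*"
        System.out.println(query);
      }
    }

For NLIKE the same builder is then expected to be wrapped in a bool query's mustNot clause by applyNot, as the surrounding code shows.
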
break; - case CHILDREN_COMPLEX: - if (value == null || !(value instanceof Where)) { - throw new SqlParseException("unsupported nested condition"); - } - - Where whereChildren = (Where) value; - BoolQueryBuilder childrenFilter = QueryMaker.explain(whereChildren); - //todo: pass score mode - toXContent = JoinQueryBuilders.hasChildQuery(name, childrenFilter, ScoreMode.None); - - break; - case SCRIPT: - ScriptFilter scriptFilter = (ScriptFilter) value; - Map params = new HashMap<>(); - if (scriptFilter.containsParameters()) { - params = scriptFilter.getArgs(); - } - - SQLExpr nameExpr = cond.getNameExpr(); - SQLExpr valueExpr = cond.getValueExpr(); - if (nameExpr instanceof SQLMethodInvokeExpr - && ((SQLMethodInvokeExpr) nameExpr).getMethodName().equalsIgnoreCase("date_format")) { - toXContent = makeForDateFormat((SQLMethodInvokeExpr) nameExpr, (SQLCharExpr) valueExpr); - } else { - toXContent = QueryBuilders.scriptQuery( - new Script( - scriptFilter.getScriptType(), - Script.DEFAULT_SCRIPT_LANG, - scriptFilter.getScript(), - params)); - } - break; - default: - throw new SqlParseException("Undefined condition: " + cond.getName()); + case LIKE: + case NLIKE: + String queryStr = ((String) value); + queryStr = queryStr.replace('%', '*').replace('_', '?'); + queryStr = queryStr.replace("&PERCENT", "%").replace("&UNDERSCORE", "_"); + toXContent = QueryBuilders.wildcardQuery(name, queryStr); + break; + case REGEXP: + case NREGEXP: + Object[] values = (Object[]) value; + RegexpQueryBuilder regexpQuery = QueryBuilders.regexpQuery(name, values[0].toString()); + if (1 < values.length) { + String[] flags = values[1].toString().split("\\|"); + RegexpFlag[] regexpFlags = new RegexpFlag[flags.length]; + for (int i = 0; i < flags.length; ++i) { + regexpFlags[i] = RegexpFlag.valueOf(flags[i]); + } + regexpQuery.flags(regexpFlags); + } + if (2 < values.length) { + regexpQuery.maxDeterminizedStates(Integer.parseInt(values[2].toString())); + } + toXContent = regexpQuery; + break; + case GT: + toXContent = QueryBuilders.rangeQuery(name).gt(value); + break; + case GTE: + toXContent = QueryBuilders.rangeQuery(name).gte(value); + break; + case LT: + toXContent = QueryBuilders.rangeQuery(name).lt(value); + break; + case LTE: + toXContent = QueryBuilders.rangeQuery(name).lte(value); + break; + case NIN: + case IN: + // todo: value is subquery? here or before + values = (Object[]) value; + TermQueryBuilder[] termQueries = new TermQueryBuilder[values.length]; + for (int i = 0; i < values.length; i++) { + termQueries[i] = QueryBuilders.termQuery(name, values[i]); } - toXContent = applyNot(cond.getOPERATOR(), toXContent); - return toXContent; - } - - public static boolean isQueryFunction(String methodName) { - return queryFunctions.contains(methodName.toLowerCase()); - } - - /** - * Helper method used to form a range query object for the date_format function. - *
<p>
- * Example: WHERE date_format(dateField, "YYYY-MM-dd") > "2012-01-01" - * Expected range query: - * "range": { - * "dateField": { - * "from": "2012-01-01", - * "to": null, - * "include_lower": false, - * "include_upper": true, - * "time_zone": "America/Los_Angeles", - * "format": "YYYY-MM-dd", - * "boost": 1 - * } - * } - * - * @param nameExpr SQL method expression (ex. date_format(dateField, "YYYY-MM-dd")) - * @param valueExpr Value expression being compared to the SQL method result (ex. "2012-01-01") - * @throws SqlParseException - */ - private ToXContent makeForDateFormat(SQLMethodInvokeExpr nameExpr, SQLCharExpr valueExpr) throws SqlParseException { - ToXContent toXContent = null; - List params = nameExpr.getParameters(); - - String field = params.get(0).toString(); - String format = removeSingleQuote(params.get(1).toString()); - String dateToCompare = valueExpr.getText(); - String oper = ((SQLBinaryOpExpr) nameExpr.getParent()).getOperator().name; - - String zoneId; - if (params.size() > 2) { - zoneId = ZoneId.of(removeSingleQuote(params.get(2).toString())).toString(); + BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); + for (TermQueryBuilder termQuery : termQueries) { + boolQuery.should(termQuery); + } + toXContent = boolQuery; + break; + case BETWEEN: + case NBETWEEN: + toXContent = + QueryBuilders.rangeQuery(name).gte(((Object[]) value)[0]).lte(((Object[]) value)[1]); + break; + case GEO_INTERSECTS: + String wkt = cond.getValue().toString(); + try { + ShapeBuilder shapeBuilder = getShapeBuilderFromString(wkt); + toXContent = QueryBuilders.geoShapeQuery(cond.getName(), shapeBuilder); + } catch (IOException e) { + e.printStackTrace(); + throw new SqlParseException( + StringUtils.format("Failed to create shapeBuilder from [%s]", wkt)); + } + break; + case GEO_BOUNDING_BOX: + BoundingBoxFilterParams boxFilterParams = (BoundingBoxFilterParams) cond.getValue(); + Point topLeft = boxFilterParams.getTopLeft(); + Point bottomRight = boxFilterParams.getBottomRight(); + toXContent = + QueryBuilders.geoBoundingBoxQuery(cond.getName()) + .setCorners( + topLeft.getLat(), topLeft.getLon(), bottomRight.getLat(), bottomRight.getLon()); + break; + case GEO_DISTANCE: + DistanceFilterParams distanceFilterParams = (DistanceFilterParams) cond.getValue(); + Point fromPoint = distanceFilterParams.getFrom(); + String distance = trimApostrophes(distanceFilterParams.getDistance()); + toXContent = + QueryBuilders.geoDistanceQuery(cond.getName()) + .distance(distance) + .point(fromPoint.getLat(), fromPoint.getLon()); + break; + case GEO_POLYGON: + PolygonFilterParams polygonFilterParams = (PolygonFilterParams) cond.getValue(); + ArrayList geoPoints = new ArrayList(); + for (Point p : polygonFilterParams.getPolygon()) { + geoPoints.add(new GeoPoint(p.getLat(), p.getLon())); + } + GeoPolygonQueryBuilder polygonFilterBuilder = + QueryBuilders.geoPolygonQuery(cond.getName(), geoPoints); + toXContent = polygonFilterBuilder; + break; + case NIN_TERMS: + case IN_TERMS: + Object[] termValues = (Object[]) value; + if (termValues.length == 1 && termValues[0] instanceof SubQueryExpression) { + termValues = ((SubQueryExpression) termValues[0]).getValues(); + } + Object[] termValuesObjects = new Object[termValues.length]; + for (int i = 0; i < termValues.length; i++) { + termValuesObjects[i] = parseTermValue(termValues[i]); + } + toXContent = QueryBuilders.termsQuery(name, termValuesObjects); + break; + case NTERM: + case TERM: + Object term = ((Object[]) value)[0]; + toXContent = QueryBuilders.termQuery(name, 
parseTermValue(term)); + break; + case IDS_QUERY: + Object[] idsParameters = (Object[]) value; + String[] ids; + if (idsParameters.length == 2 && idsParameters[1] instanceof SubQueryExpression) { + Object[] idsFromSubQuery = ((SubQueryExpression) idsParameters[1]).getValues(); + ids = arrayOfObjectsToStringArray(idsFromSubQuery, 0, idsFromSubQuery.length - 1); } else { - // Using UTC, if there is no Zone provided. - zoneId = UTC.getId(); + ids = arrayOfObjectsToStringArray(idsParameters, 1, idsParameters.length - 1); } - - RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(field).format(format).timeZone(zoneId); - switch (oper) { - case "<>": - case "=": - toXContent = rangeQuery.gte(dateToCompare).lte(dateToCompare); - break; - case ">": - toXContent = rangeQuery.gt(dateToCompare); - break; - case "<": - toXContent = rangeQuery.lt(dateToCompare); - break; - case ">=": - toXContent = rangeQuery.gte(dateToCompare); - break; - case "<=": - toXContent = rangeQuery.lte(dateToCompare); - break; - case "BETWEEN": - case "NOT BETWEEN": - //todo: Add support for BETWEEN - break; - default: - throw new SqlParseException("date_format does not support the operation " + oper); + toXContent = QueryBuilders.idsQuery().addIds(ids); + break; + case NESTED_COMPLEX: + case NOT_EXISTS_NESTED_COMPLEX: + if (value == null || !(value instanceof Where)) { + throw new SqlParseException("unsupported nested condition"); } - toXContent = applyNot(Condition.OPERATOR.operStringToOpear.get(oper), toXContent); - return toXContent; - } + Where whereNested = (Where) value; + BoolQueryBuilder nestedFilter = QueryMaker.explain(whereNested); - private String removeSingleQuote(String param) { - return param.replaceAll("\'", ""); - } + toXContent = QueryBuilders.nestedQuery(name, nestedFilter, ScoreMode.None); + break; + case CHILDREN_COMPLEX: + if (value == null || !(value instanceof Where)) { + throw new SqlParseException("unsupported nested condition"); + } - private String[] arrayOfObjectsToStringArray(Object[] values, int from, int to) { - String[] strings = new String[to - from + 1]; - int counter = 0; - for (int i = from; i <= to; i++) { - strings[counter] = values[i].toString(); - counter++; + Where whereChildren = (Where) value; + BoolQueryBuilder childrenFilter = QueryMaker.explain(whereChildren); + // todo: pass score mode + toXContent = JoinQueryBuilders.hasChildQuery(name, childrenFilter, ScoreMode.None); + + break; + case SCRIPT: + ScriptFilter scriptFilter = (ScriptFilter) value; + Map params = new HashMap<>(); + if (scriptFilter.containsParameters()) { + params = scriptFilter.getArgs(); } - return strings; - } - private ShapeBuilder getShapeBuilderFromString(String str) throws IOException, SqlParseException { - String json; - if (str.contains("{")) { - json = fixJsonFromElastic(str); + SQLExpr nameExpr = cond.getNameExpr(); + SQLExpr valueExpr = cond.getValueExpr(); + if (nameExpr instanceof SQLMethodInvokeExpr + && ((SQLMethodInvokeExpr) nameExpr).getMethodName().equalsIgnoreCase("date_format")) { + toXContent = makeForDateFormat((SQLMethodInvokeExpr) nameExpr, (SQLCharExpr) valueExpr); } else { - json = WktToGeoJsonConverter.toGeoJson(trimApostrophes(str)); + toXContent = + QueryBuilders.scriptQuery( + new Script( + scriptFilter.getScriptType(), + Script.DEFAULT_SCRIPT_LANG, + scriptFilter.getScript(), + params)); } - - return getShapeBuilderFromJson(json); + break; + default: + throw new SqlParseException("Undefined condition: " + cond.getName()); } - /* - * elastic sends {coordinates=[[[100.0, 0.0], 
[101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], - * type=Polygon} - * proper form is {"coordinates":[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], - * "type":"Polygon"} - * */ - private String fixJsonFromElastic(String elasticJson) { - String properJson = elasticJson.replaceAll("=", ":"); - properJson = properJson.replaceAll("(type)(:)([a-zA-Z]+)", "\"type\":\"$3\""); - properJson = properJson.replaceAll("coordinates", "\"coordinates\""); - return properJson; + toXContent = applyNot(cond.getOPERATOR(), toXContent); + return toXContent; + } + + public static boolean isQueryFunction(String methodName) { + return queryFunctions.contains(methodName.toLowerCase()); + } + + /** + * Helper method used to form a range query object for the date_format function. + * + *
<p>
Example: WHERE date_format(dateField, "YYYY-MM-dd") > "2012-01-01" Expected range query: + * "range": { "dateField": { "from": "2012-01-01", "to": null, "include_lower": false, + * "include_upper": true, "time_zone": "America/Los_Angeles", "format": "YYYY-MM-dd", "boost": 1 } + * } + * + * @param nameExpr SQL method expression (ex. date_format(dateField, "YYYY-MM-dd")) + * @param valueExpr Value expression being compared to the SQL method result (ex. "2012-01-01") + * @throws SqlParseException + */ + private ToXContent makeForDateFormat(SQLMethodInvokeExpr nameExpr, SQLCharExpr valueExpr) + throws SqlParseException { + ToXContent toXContent = null; + List params = nameExpr.getParameters(); + + String field = params.get(0).toString(); + String format = removeSingleQuote(params.get(1).toString()); + String dateToCompare = valueExpr.getText(); + String oper = ((SQLBinaryOpExpr) nameExpr.getParent()).getOperator().name; + + String zoneId; + if (params.size() > 2) { + zoneId = ZoneId.of(removeSingleQuote(params.get(2).toString())).toString(); + } else { + // Using UTC, if there is no Zone provided. + zoneId = UTC.getId(); } - private ShapeBuilder getShapeBuilderFromJson(String json) throws IOException { - XContentParser parser = null; - parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, json); - parser.nextToken(); - return ShapeParser.parse(parser); + RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(field).format(format).timeZone(zoneId); + switch (oper) { + case "<>": + case "=": + toXContent = rangeQuery.gte(dateToCompare).lte(dateToCompare); + break; + case ">": + toXContent = rangeQuery.gt(dateToCompare); + break; + case "<": + toXContent = rangeQuery.lt(dateToCompare); + break; + case ">=": + toXContent = rangeQuery.gte(dateToCompare); + break; + case "<=": + toXContent = rangeQuery.lte(dateToCompare); + break; + case "BETWEEN": + case "NOT BETWEEN": + // todo: Add support for BETWEEN + break; + default: + throw new SqlParseException("date_format does not support the operation " + oper); } - private String trimApostrophes(String str) { - return str.substring(1, str.length() - 1); - } + toXContent = applyNot(Condition.OPERATOR.operStringToOpear.get(oper), toXContent); + return toXContent; + } - /** - * Applies negation to query builder if the operation is a "not" operation. 
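
The makeForDateFormat change above maps a comparison against date_format(field, format[, zone]) onto a single range query that carries the date format and time zone. A rough equivalent of the javadoc example, written out with the literal values from that example rather than the parsed SQL expressions:

    import org.opensearch.index.query.QueryBuilders;
    import org.opensearch.index.query.RangeQueryBuilder;

    public class DateFormatRangeSketch {
      public static void main(String[] args) {
        // WHERE date_format(dateField, 'YYYY-MM-dd', 'America/Los_Angeles') > '2012-01-01'
        RangeQueryBuilder range =
            QueryBuilders.rangeQuery("dateField")
                .format("YYYY-MM-dd")
                .timeZone("America/Los_Angeles")
                .gt("2012-01-01");  // '=' maps to .gte(value).lte(value), '<=' to .lte(value), etc.
        System.out.println(range);
      }
    }
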
- */ - private ToXContent applyNot(Condition.OPERATOR OPERATOR, ToXContent bqb) { - if (NOT_OPERATOR_SET.contains(OPERATOR)) { - bqb = QueryBuilders.boolQuery().mustNot((QueryBuilder) bqb); - } - return bqb; - } + private String removeSingleQuote(String param) { + return param.replaceAll("\'", ""); + } - private Object parseTermValue(Object termValue) { - if (termValue instanceof SQLNumericLiteralExpr) { - termValue = ((SQLNumericLiteralExpr) termValue).getNumber(); - if (termValue instanceof BigDecimal || termValue instanceof Double) { - termValue = ((Number) termValue).doubleValue(); - } else if (termValue instanceof Float) { - termValue = ((Number) termValue).floatValue(); - } else if (termValue instanceof BigInteger || termValue instanceof Long) { - termValue = ((Number) termValue).longValue(); - } else if (termValue instanceof Integer) { - termValue = ((Number) termValue).intValue(); - } else if (termValue instanceof Short) { - termValue = ((Number) termValue).shortValue(); - } else if (termValue instanceof Byte) { - termValue = ((Number) termValue).byteValue(); - } - } else if (termValue instanceof SQLBooleanExpr) { - termValue = ((SQLBooleanExpr) termValue).getValue(); - } else { - termValue = termValue.toString(); - } + private String[] arrayOfObjectsToStringArray(Object[] values, int from, int to) { + String[] strings = new String[to - from + 1]; + int counter = 0; + for (int i = from; i <= to; i++) { + strings[counter] = values[i].toString(); + counter++; + } + return strings; + } + + private ShapeBuilder getShapeBuilderFromString(String str) throws IOException, SqlParseException { + String json; + if (str.contains("{")) { + json = fixJsonFromElastic(str); + } else { + json = WktToGeoJsonConverter.toGeoJson(trimApostrophes(str)); + } - return termValue; + return getShapeBuilderFromJson(json); + } + + /* + * elastic sends {coordinates=[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], + * type=Polygon} + * proper form is {"coordinates":[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], + * "type":"Polygon"} + * */ + private String fixJsonFromElastic(String elasticJson) { + String properJson = elasticJson.replaceAll("=", ":"); + properJson = properJson.replaceAll("(type)(:)([a-zA-Z]+)", "\"type\":\"$3\""); + properJson = properJson.replaceAll("coordinates", "\"coordinates\""); + return properJson; + } + + private ShapeBuilder getShapeBuilderFromJson(String json) throws IOException { + XContentParser parser = null; + parser = + JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, json); + parser.nextToken(); + return ShapeParser.parse(parser); + } + + private String trimApostrophes(String str) { + return str.substring(1, str.length() - 1); + } + + /** Applies negation to query builder if the operation is a "not" operation. 
*/ + private ToXContent applyNot(Condition.OPERATOR OPERATOR, ToXContent bqb) { + if (NOT_OPERATOR_SET.contains(OPERATOR)) { + bqb = QueryBuilders.boolQuery().mustNot((QueryBuilder) bqb); } + return bqb; + } + + private Object parseTermValue(Object termValue) { + if (termValue instanceof SQLNumericLiteralExpr) { + termValue = ((SQLNumericLiteralExpr) termValue).getNumber(); + if (termValue instanceof BigDecimal || termValue instanceof Double) { + termValue = ((Number) termValue).doubleValue(); + } else if (termValue instanceof Float) { + termValue = ((Number) termValue).floatValue(); + } else if (termValue instanceof BigInteger || termValue instanceof Long) { + termValue = ((Number) termValue).longValue(); + } else if (termValue instanceof Integer) { + termValue = ((Number) termValue).intValue(); + } else if (termValue instanceof Short) { + termValue = ((Number) termValue).shortValue(); + } else if (termValue instanceof Byte) { + termValue = ((Number) termValue).byteValue(); + } + } else if (termValue instanceof SQLBooleanExpr) { + termValue = ((SQLBooleanExpr) termValue).getValue(); + } else { + termValue = termValue.toString(); + } + + return termValue; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java index cd9b1f4030..a9eb6113f7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import java.util.HashSet; @@ -18,66 +17,68 @@ import org.opensearch.sql.legacy.query.QueryAction; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; -/** - * Created by Eliran on 19/8/2016. - */ +/** Created by Eliran on 19/8/2016. 
*/ public class MultiQueryAction extends QueryAction { - private MultiQuerySelect multiQuerySelect; + private MultiQuerySelect multiQuerySelect; + + public MultiQueryAction(Client client, MultiQuerySelect multiSelect) { + super(client, null); + this.multiQuerySelect = multiSelect; + } - public MultiQueryAction(Client client, MultiQuerySelect multiSelect) { - super(client, null); - this.multiQuerySelect = multiSelect; + @Override + public SqlElasticRequestBuilder explain() throws SqlParseException { + if (!isValidMultiSelectReturnFields()) { + throw new SqlParseException( + "on multi query fields/aliases of one table should be subset of other"); } + MultiQueryRequestBuilder requestBuilder = new MultiQueryRequestBuilder(this.multiQuerySelect); + requestBuilder.setFirstSearchRequest( + createRequestBuilder(this.multiQuerySelect.getFirstSelect())); + requestBuilder.setSecondSearchRequest( + createRequestBuilder(this.multiQuerySelect.getSecondSelect())); + requestBuilder.fillTableAliases( + this.multiQuerySelect.getFirstSelect().getFields(), + this.multiQuerySelect.getSecondSelect().getFields()); - @Override - public SqlElasticRequestBuilder explain() throws SqlParseException { - if (!isValidMultiSelectReturnFields()) { - throw new SqlParseException("on multi query fields/aliases of one table should be subset of other"); - } - MultiQueryRequestBuilder requestBuilder = new MultiQueryRequestBuilder(this.multiQuerySelect); - requestBuilder.setFirstSearchRequest(createRequestBuilder(this.multiQuerySelect.getFirstSelect())); - requestBuilder.setSecondSearchRequest(createRequestBuilder(this.multiQuerySelect.getSecondSelect())); - requestBuilder.fillTableAliases(this.multiQuerySelect.getFirstSelect().getFields(), - this.multiQuerySelect.getSecondSelect().getFields()); + return requestBuilder; + } - return requestBuilder; + private boolean isValidMultiSelectReturnFields() { + List firstQueryFields = multiQuerySelect.getFirstSelect().getFields(); + List secondQueryFields = multiQuerySelect.getSecondSelect().getFields(); + if (firstQueryFields.size() > secondQueryFields.size()) { + return isSubsetFields(firstQueryFields, secondQueryFields); } + return isSubsetFields(secondQueryFields, firstQueryFields); + } - private boolean isValidMultiSelectReturnFields() { - List firstQueryFields = multiQuerySelect.getFirstSelect().getFields(); - List secondQueryFields = multiQuerySelect.getSecondSelect().getFields(); - if (firstQueryFields.size() > secondQueryFields.size()) { - return isSubsetFields(firstQueryFields, secondQueryFields); - } - return isSubsetFields(secondQueryFields, firstQueryFields); + private boolean isSubsetFields(List bigGroup, List smallerGroup) { + Set biggerGroup = new HashSet<>(); + for (Field field : bigGroup) { + String fieldName = getNameOrAlias(field); + biggerGroup.add(fieldName); } - - private boolean isSubsetFields(List bigGroup, List smallerGroup) { - Set biggerGroup = new HashSet<>(); - for (Field field : bigGroup) { - String fieldName = getNameOrAlias(field); - biggerGroup.add(fieldName); - } - for (Field field : smallerGroup) { - String fieldName = getNameOrAlias(field); - if (!biggerGroup.contains(fieldName)) { - return false; - } - } - return true; + for (Field field : smallerGroup) { + String fieldName = getNameOrAlias(field); + if (!biggerGroup.contains(fieldName)) { + return false; + } } + return true; + } - private String getNameOrAlias(Field field) { - String fieldName = field.getName(); - if (field.getAlias() != null && !field.getAlias().isEmpty()) { - fieldName = 
field.getAlias(); - } - return fieldName; + private String getNameOrAlias(Field field) { + String fieldName = field.getName(); + if (field.getAlias() != null && !field.getAlias().isEmpty()) { + fieldName = field.getAlias(); } + return fieldName; + } - protected SearchRequestBuilder createRequestBuilder(Select select) throws SqlParseException { - DefaultQueryAction queryAction = new DefaultQueryAction(client, select); - queryAction.explain(); - return queryAction.getRequestBuilder(); - } + protected SearchRequestBuilder createRequestBuilder(Select select) throws SqlParseException { + DefaultQueryAction queryAction = new DefaultQueryAction(client, select); + queryAction.explain(); + return queryAction.getRequestBuilder(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java index 5340a701ed..b4e92a8de6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import com.alibaba.druid.sql.ast.statement.SQLUnionOperator; @@ -23,108 +22,108 @@ import org.opensearch.sql.legacy.domain.Select; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; -/** - * Created by Eliran on 19/8/2016. - */ +/** Created by Eliran on 19/8/2016. */ public class MultiQueryRequestBuilder implements SqlElasticRequestBuilder { - private SearchRequestBuilder firstSearchRequest; - private SearchRequestBuilder secondSearchRequest; - private Map firstTableFieldToAlias; - private Map secondTableFieldToAlias; - private MultiQuerySelect multiQuerySelect; - private SQLUnionOperator relation; - - - public MultiQueryRequestBuilder(MultiQuerySelect multiQuerySelect) { - this.multiQuerySelect = multiQuerySelect; - this.relation = multiQuerySelect.getOperation(); - this.firstTableFieldToAlias = new HashMap<>(); - this.secondTableFieldToAlias = new HashMap<>(); - } - - @Override - public ActionRequest request() { - return null; - } - - - @Override - public String explain() { - - try { - XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); - this.firstSearchRequest.request().source().toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); - - XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); - this.secondSearchRequest.request().source().toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); - return String.format("performing %s on :\n left query:\n%s\n right query:\n%s", - this.relation.name, BytesReference.bytes(firstBuilder).utf8ToString(), - BytesReference.bytes(secondBuilder).utf8ToString()); - - } catch (IOException e) { - e.printStackTrace(); - } - return null; - } - - @Override - public ActionResponse get() { - return null; - } - - @Override - public ActionRequestBuilder getBuilder() { - return null; + private SearchRequestBuilder firstSearchRequest; + private SearchRequestBuilder secondSearchRequest; + private Map firstTableFieldToAlias; + private Map secondTableFieldToAlias; + private MultiQuerySelect multiQuerySelect; + private SQLUnionOperator relation; + + public MultiQueryRequestBuilder(MultiQuerySelect multiQuerySelect) { + this.multiQuerySelect = multiQuerySelect; + this.relation = multiQuerySelect.getOperation(); + this.firstTableFieldToAlias = new HashMap<>(); + 
this.secondTableFieldToAlias = new HashMap<>(); + } + + @Override + public ActionRequest request() { + return null; + } + + @Override + public String explain() { + + try { + XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); + this.firstSearchRequest.request().source().toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); + + XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); + this.secondSearchRequest + .request() + .source() + .toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); + return String.format( + "performing %s on :\n left query:\n%s\n right query:\n%s", + this.relation.name, + BytesReference.bytes(firstBuilder).utf8ToString(), + BytesReference.bytes(secondBuilder).utf8ToString()); + + } catch (IOException e) { + e.printStackTrace(); } - - - public SearchRequestBuilder getFirstSearchRequest() { - return firstSearchRequest; - } - - public SearchRequestBuilder getSecondSearchRequest() { - return secondSearchRequest; - } - - public SQLUnionOperator getRelation() { - return relation; - } - - public void setFirstSearchRequest(SearchRequestBuilder firstSearchRequest) { - this.firstSearchRequest = firstSearchRequest; + return null; + } + + @Override + public ActionResponse get() { + return null; + } + + @Override + public ActionRequestBuilder getBuilder() { + return null; + } + + public SearchRequestBuilder getFirstSearchRequest() { + return firstSearchRequest; + } + + public SearchRequestBuilder getSecondSearchRequest() { + return secondSearchRequest; + } + + public SQLUnionOperator getRelation() { + return relation; + } + + public void setFirstSearchRequest(SearchRequestBuilder firstSearchRequest) { + this.firstSearchRequest = firstSearchRequest; + } + + public void setSecondSearchRequest(SearchRequestBuilder secondSearchRequest) { + this.secondSearchRequest = secondSearchRequest; + } + + public void fillTableAliases(List firstTableFields, List secondTableFields) { + fillTableToAlias(this.firstTableFieldToAlias, firstTableFields); + fillTableToAlias(this.secondTableFieldToAlias, secondTableFields); + } + + private void fillTableToAlias(Map fieldToAlias, List fields) { + for (Field field : fields) { + if (field.getAlias() != null && !field.getAlias().isEmpty()) { + fieldToAlias.put(field.getName(), field.getAlias()); + } } + } - public void setSecondSearchRequest(SearchRequestBuilder secondSearchRequest) { - this.secondSearchRequest = secondSearchRequest; - } - - public void fillTableAliases(List firstTableFields, List secondTableFields) { - fillTableToAlias(this.firstTableFieldToAlias, firstTableFields); - fillTableToAlias(this.secondTableFieldToAlias, secondTableFields); - } - - private void fillTableToAlias(Map fieldToAlias, List fields) { - for (Field field : fields) { - if (field.getAlias() != null && !field.getAlias().isEmpty()) { - fieldToAlias.put(field.getName(), field.getAlias()); - } - } - } - - public Map getFirstTableFieldToAlias() { - return firstTableFieldToAlias; - } + public Map getFirstTableFieldToAlias() { + return firstTableFieldToAlias; + } - public Map getSecondTableFieldToAlias() { - return secondTableFieldToAlias; - } + public Map getSecondTableFieldToAlias() { + return secondTableFieldToAlias; + } - public Select getOriginalSelect(boolean first) { - if (first) { - return this.multiQuerySelect.getFirstSelect(); - } else { - return this.multiQuerySelect.getSecondSelect(); - } + public Select getOriginalSelect(boolean first) { + if (first) { + return this.multiQuerySelect.getFirstSelect(); + } else { + return 
this.multiQuerySelect.getSecondSelect(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java index e5dd1716ed..72e7232a30 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java @@ -3,35 +3,32 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import com.alibaba.druid.sql.ast.statement.SQLUnionOperator; import org.opensearch.sql.legacy.domain.Select; -/** - * Created by Eliran on 19/8/2016. - */ +/** Created by Eliran on 19/8/2016. */ public class MultiQuerySelect { - private SQLUnionOperator operation; - private Select firstSelect; - private Select secondSelect; - - public MultiQuerySelect(SQLUnionOperator operation, Select firstSelect, Select secondSelect) { - this.operation = operation; - this.firstSelect = firstSelect; - this.secondSelect = secondSelect; - } - - public SQLUnionOperator getOperation() { - return operation; - } - - public Select getFirstSelect() { - return firstSelect; - } - - public Select getSecondSelect() { - return secondSelect; - } + private SQLUnionOperator operation; + private Select firstSelect; + private Select secondSelect; + + public MultiQuerySelect(SQLUnionOperator operation, Select firstSelect, Select secondSelect) { + this.operation = operation; + this.firstSelect = firstSelect; + this.secondSelect = secondSelect; + } + + public SQLUnionOperator getOperation() { + return operation; + } + + public Select getFirstSelect() { + return firstSelect; + } + + public Select getSecondSelect() { + return secondSelect; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java index 312ade197a..25146294bc 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner; import org.opensearch.client.Client; @@ -14,64 +13,50 @@ import org.opensearch.sql.legacy.request.SqlRequest; /** - * QueryPlanner builder for Hash Join query. In future, different queries could have its own builders to generate - * QueryPlanner. QueryPlanner would run all stages in its pipeline no matter how it be assembled. + * QueryPlanner builder for Hash Join query. In the future, different queries could have its own + * builders to generate QueryPlanner. QueryPlanner would run all stages in its pipeline no matter + * how it is assembled. */ public class HashJoinQueryPlanRequestBuilder extends HashJoinElasticRequestBuilder { - /** - * Client connection to OpenSearch cluster - */ - private final Client client; - - /** - * Query request - */ - private final SqlRequest request; - - /** - * Query planner configuration - */ - private final Config config; - - - public HashJoinQueryPlanRequestBuilder(Client client, SqlRequest request) { - this.client = client; - this.request = request; - this.config = new Config(); - } - - @Override - public String explain() { - return plan().explain(); - } - - /** - * Planning for the query and create planner for explain/execute later. 
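
For context on the HashJoinQueryPlanRequestBuilder above: explain() is just plan().explain(), and plan() applies the limit and terms-filter configuration before assembling the QueryPlanner pipeline. A minimal usage sketch, under the assumption that the join tables, conditions and hints have already been populated on the builder by the SQL parser (that wiring is omitted here):

    import org.opensearch.client.Client;
    import org.opensearch.sql.legacy.query.planner.HashJoinQueryPlanRequestBuilder;
    import org.opensearch.sql.legacy.request.SqlRequest;

    public class HashJoinExplainSketch {
      // Returns the two-section (logical plan + physical plan) explanation for a hash join.
      static String explainHashJoin(Client client, SqlRequest request) {
        HashJoinQueryPlanRequestBuilder builder =
            new HashJoinQueryPlanRequestBuilder(client, request);
        // plan() configures limits and the terms-filter optimization, then builds the planner
        return builder.plan().explain();
      }
    }
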
- * - * @return query planner - */ - public QueryPlanner plan() { - config.configureLimit( - getTotalLimit(), - getFirstTable().getHintLimit(), - getSecondTable().getHintLimit() - ); - config.configureTermsFilterOptimization(isUseTermFiltersOptimization()); - - return new QueryPlanner( - client, - config, - new QueryParams( - getFirstTable(), - getSecondTable(), - getJoinType(), - getT1ToT2FieldsComparison() - ) - ); - } - - public Config getConfig() { - return config; - } + /** Client connection to OpenSearch cluster */ + private final Client client; + + /** Query request */ + private final SqlRequest request; + + /** Query planner configuration */ + private final Config config; + + public HashJoinQueryPlanRequestBuilder(Client client, SqlRequest request) { + this.client = client; + this.request = request; + this.config = new Config(); + } + + @Override + public String explain() { + return plan().explain(); + } + + /** + * Planning for the query and create planner for explain/execute later. + * + * @return query planner + */ + public QueryPlanner plan() { + config.configureLimit( + getTotalLimit(), getFirstTable().getHintLimit(), getSecondTable().getHintLimit()); + config.configureTermsFilterOptimization(isUseTermFiltersOptimization()); + + return new QueryPlanner( + client, + config, + new QueryParams( + getFirstTable(), getSecondTable(), getJoinType(), getT1ToT2FieldsComparison())); + } + + public Config getConfig() { + return config; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java index dcb3c3b727..c5ed48a514 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java @@ -3,32 +3,29 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import java.util.EnumMap; -/** - * Parameters needed for physical operator execution. - */ +/** Parameters needed for physical operator execution. 
*/ public class ExecuteParams { - /** - * Mapping from type to parameters - */ - private EnumMap params = new EnumMap<>(ExecuteParamType.class); - - public void add(ExecuteParamType type, T param) { - params.put(type, param); - } + /** Mapping from type to parameters */ + private EnumMap params = new EnumMap<>(ExecuteParamType.class); - @SuppressWarnings("unchecked") - public T get(ExecuteParamType type) { - return (T) params.get(type); - } + public void add(ExecuteParamType type, T param) { + params.put(type, param); + } - public enum ExecuteParamType { - CLIENT, RESOURCE_MANAGER, EXTRA_QUERY_FILTER, TIMEOUT - } + @SuppressWarnings("unchecked") + public T get(ExecuteParamType type) { + return (T) params.get(type); + } + public enum ExecuteParamType { + CLIENT, + RESOURCE_MANAGER, + EXTRA_QUERY_FILTER, + TIMEOUT + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java index a22f2c5b7f..635ea3aace 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.explain; import com.google.common.collect.ImmutableMap; @@ -14,87 +13,71 @@ import org.opensearch.sql.legacy.query.planner.logical.node.Group; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Base class for different explanation implementation - */ +/** Base class for different explanation implementation */ public class Explanation implements Visitor { - /** - * Hard coding description to be consistent with old nested join explanation - */ - private static final String DESCRIPTION = - "Hash Join algorithm builds hash table based on result of first query, " - + "and then probes hash table to find matched rows for each row returned by second query"; - - /** - * Plans to be explained - */ - private final Plan logicalPlan; - private final Plan physicalPlan; - - /** - * Explanation format - */ - private final ExplanationFormat format; - - public Explanation(Plan logicalPlan, - Plan physicalPlan, - ExplanationFormat format) { - this.logicalPlan = logicalPlan; - this.physicalPlan = physicalPlan; - this.format = format; - } + /** Hard coding description to be consistent with old nested join explanation */ + private static final String DESCRIPTION = + "Hash Join algorithm builds hash table based on result of first query, " + + "and then probes hash table to find matched rows for each row returned by second query"; - @Override - public String toString() { - format.prepare(ImmutableMap.of("description", DESCRIPTION)); + /** Plans to be explained */ + private final Plan logicalPlan; - format.start("Logical Plan"); - logicalPlan.traverse(this); - format.end(); + private final Plan physicalPlan; - format.start("Physical Plan"); - physicalPlan.traverse(this); - format.end(); + /** Explanation format */ + private final ExplanationFormat format; - return format.toString(); - } + public Explanation(Plan logicalPlan, Plan physicalPlan, ExplanationFormat format) { + this.logicalPlan = logicalPlan; + this.physicalPlan = physicalPlan; + this.format = format; + } - @Override - public boolean visit(PlanNode node) { - if (isValidOp(node)) { - format.explain(node); - } - return true; - } + @Override + public String toString() { + 
format.prepare(ImmutableMap.of("description", DESCRIPTION)); - @Override - public void endVisit(PlanNode node) { - if (isValidOp(node)) { - format.end(); - } - } + format.start("Logical Plan"); + logicalPlan.traverse(this); + format.end(); - /** - * Check if node is a valid logical or physical operator - */ - private boolean isValidOp(PlanNode node) { - return isValidLogical(node) || isPhysical(node); - } + format.start("Physical Plan"); + physicalPlan.traverse(this); + format.end(); - /** - * Valid logical operator means it's Group OR NOT a no-op because Group clarify explanation - */ - private boolean isValidLogical(PlanNode node) { - return (node instanceof LogicalOperator) - && (node instanceof Group || !((LogicalOperator) node).isNoOp()); - } + return format.toString(); + } - /** - * Right now all physical operators are valid and non-no-op - */ - private boolean isPhysical(PlanNode node) { - return node instanceof PhysicalOperator; + @Override + public boolean visit(PlanNode node) { + if (isValidOp(node)) { + format.explain(node); } + return true; + } + @Override + public void endVisit(PlanNode node) { + if (isValidOp(node)) { + format.end(); + } + } + + /** Check if node is a valid logical or physical operator */ + private boolean isValidOp(PlanNode node) { + return isValidLogical(node) || isPhysical(node); + } + + /** Valid logical operator means it's Group OR NOT a no-op because Group clarify explanation */ + private boolean isValidLogical(PlanNode node) { + return (node instanceof LogicalOperator) + && (node instanceof Group || !((LogicalOperator) node).isNoOp()); + } + + /** Right now all physical operators are valid and non-no-op */ + private boolean isPhysical(PlanNode node) { + return node instanceof PhysicalOperator; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java index 23c8bb76fe..a349666221 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java @@ -3,42 +3,34 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.explain; import java.util.Map; -/** - * Explanation format - */ +/** Explanation format */ public interface ExplanationFormat { - /** - * Initialize internal data structure - * - * @param kvs key-value pairs - */ - void prepare(Map kvs); - - /** - * Start a new section in explanation. - * - * @param name section name - */ - void start(String name); - - - /** - * Explain and add to current section. - * - * @param object object to be added to explanation - */ - void explain(Object object); - - - /** - * End current section. - */ - void end(); - + /** + * Initialize internal data structure + * + * @param kvs key-value pairs + */ + void prepare(Map kvs); + + /** + * Start a new section in explanation. + * + * @param name section name + */ + void start(String name); + + /** + * Explain and add to current section. + * + * @param object object to be added to explanation + */ + void explain(Object object); + + /** End current section. 
*/ + void end(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java index 404205d30b..7bf4f833de 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.explain; import java.util.ArrayDeque; @@ -13,89 +12,80 @@ import org.json.JSONException; import org.json.JSONObject; -/** - * Explain query plan in JSON format. - */ +/** Explain query plan in JSON format. */ public class JsonExplanationFormat implements ExplanationFormat { - /** - * JSONObject stack to track the path from root to current ndoe - */ - private final Deque jsonObjStack = new ArrayDeque<>(); - - /** - * Indentation in final output string - */ - private final int indent; - - public JsonExplanationFormat(int indent) { - this.indent = indent; - } - - @Override - public void prepare(Map kvs) { - jsonObjStack.push(new JSONObject(kvs)); - } - - @Override - public void start(String name) { - JSONObject json = new JSONObject(); - jsonObjStack.peek().put(name, json); - jsonObjStack.push(json); - } - - @Override - public void explain(Object obj) { - JSONObject json = new JSONObject(obj); // JSONify using getter - jsonifyValueIfValidJson(json); - appendToArrayIfExist(nodeName(obj), json); - jsonObjStack.push(json); + /** JSONObject stack to track the path from root to current ndoe */ + private final Deque jsonObjStack = new ArrayDeque<>(); + + /** Indentation in final output string */ + private final int indent; + + public JsonExplanationFormat(int indent) { + this.indent = indent; + } + + @Override + public void prepare(Map kvs) { + jsonObjStack.push(new JSONObject(kvs)); + } + + @Override + public void start(String name) { + JSONObject json = new JSONObject(); + jsonObjStack.peek().put(name, json); + jsonObjStack.push(json); + } + + @Override + public void explain(Object obj) { + JSONObject json = new JSONObject(obj); // JSONify using getter + jsonifyValueIfValidJson(json); + appendToArrayIfExist(nodeName(obj), json); + jsonObjStack.push(json); + } + + @Override + public void end() { + jsonObjStack.pop(); + } + + @Override + public String toString() { + return jsonObjStack.pop().toString(indent); + } + + /** + * Trick to parse JSON in field getter due to missing support for custom processor in org.json. + * And also because it's not appropriate to make getter aware of concrete format logic + */ + private void jsonifyValueIfValidJson(JSONObject json) { + for (String key : json.keySet()) { + try { + JSONObject jsonValue = new JSONObject(json.getString(key)); + json.put(key, jsonValue); + } catch (JSONException e) { + // Ignore value that is not a valid JSON. + } } - - @Override - public void end() { - jsonObjStack.pop(); - } - - @Override - public String toString() { - return jsonObjStack.pop().toString(indent); - } - - /** - * Trick to parse JSON in field getter due to missing support for custom processor - * in org.json. 
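
The JsonExplanationFormat in this hunk renders those explain sections by keeping a stack of JSONObjects: prepare() seeds the root, start()/end() open and close a named section, explain() attaches one node keyed by its toString() (duplicate keys are collapsed into a JSONArray), and toString() pops the root with the configured indentation. An illustrative driver, not part of this change, with an invented bean standing in for a plan node:

    import com.google.common.collect.ImmutableMap;
    import org.opensearch.sql.legacy.query.planner.explain.JsonExplanationFormat;

    public class ExplanationFormatSketch {
      // Stand-in for a plan node; like the real operators it is JSONified through its getters.
      public static class Node {
        public String getDescription() { return "Top [ count=200 ]"; }
        @Override public String toString() { return "Top"; }
      }

      public static void main(String[] args) {
        JsonExplanationFormat format = new JsonExplanationFormat(2);  // 2-space indentation
        format.prepare(ImmutableMap.of("description", "demo"));      // root key-value pairs
        format.start("Logical Plan");                                 // open a named section
        format.explain(new Node());                                   // adds { "Top": { "description": ... } }
        format.end();                                                 // close the node
        format.end();                                                 // close the section
        System.out.println(format);                                   // prints the nested JSON
      }
    }
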
And also because it's not appropriate to make getter aware of concrete format logic - */ - private void jsonifyValueIfValidJson(JSONObject json) { - for (String key : json.keySet()) { - try { - JSONObject jsonValue = new JSONObject(json.getString(key)); - json.put(key, jsonValue); - } catch (JSONException e) { - // Ignore value that is not a valid JSON. - } - } + } + + private String nodeName(Object obj) { + return obj.toString(); // obj.getClass().getSimpleName(); + } + + /** Replace JSONObject by JSONArray if key is duplicate */ + private void appendToArrayIfExist(String name, JSONObject child) { + JSONObject parent = jsonObjStack.peek(); + Object otherChild = parent.opt(name); + if (otherChild == null) { + parent.put(name, child); + } else { + if (!(otherChild instanceof JSONArray)) { + parent.remove(name); + parent.append(name, otherChild); + } + parent.append(name, child); } - - private String nodeName(Object obj) { - return obj.toString(); //obj.getClass().getSimpleName(); - } - - /** - * Replace JSONObject by JSONArray if key is duplicate - */ - private void appendToArrayIfExist(String name, JSONObject child) { - JSONObject parent = jsonObjStack.peek(); - Object otherChild = parent.opt(name); - if (otherChild == null) { - parent.put(name, child); - } else { - if (!(otherChild instanceof JSONArray)) { - parent.remove(name); - parent.append(name, otherChild); - } - parent.append(name, child); - } - } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java index 825af762f5..b814f1f563 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical; import java.util.Map; @@ -11,32 +10,29 @@ import org.opensearch.sql.legacy.query.planner.core.PlanNode; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Logical operator in logical plan tree. - */ +/** Logical operator in logical plan tree. */ public interface LogicalOperator extends PlanNode { - /** - * If current operator is no operation. It depends on specific internal state of operator - *
<p>
- * Ignore this field in explanation because all explainable operator are NOT no-op. - * - * @return true if NoOp - */ - @JSONPropertyIgnore - default boolean isNoOp() { - return false; - } - - /** - * Map logical operator to physical operators (possibly 1 to N mapping) - *
<p>
- * Note that generic type on PhysicalOperator[] would enforce all impl convert array to generic type array - * because generic type array is unable to be created directly. - * - * @param optimalOps optimal physical operators estimated so far - * @return list of physical operator - */ - PhysicalOperator[] toPhysical(Map> optimalOps); + /** + * If current operator is no operation. It depends on specific internal state of operator + * + *
<p>
Ignore this field in explanation because all explainable operator are NOT no-op. + * + * @return true if NoOp + */ + @JSONPropertyIgnore + default boolean isNoOp() { + return false; + } + /** + * Map logical operator to physical operators (possibly 1 to N mapping) + * + *
<p>
Note that generic type on PhysicalOperator[] would enforce all impl convert array to generic + * type array because generic type array is unable to be created directly. + * + * @param optimalOps optimal physical operators estimated so far + * @return list of physical operator + */ + PhysicalOperator[] toPhysical(Map> optimalOps); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java index 369da44e7f..05a797bbe0 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical; import static org.opensearch.sql.legacy.query.planner.logical.node.Project.Visitor; @@ -32,176 +31,154 @@ import org.opensearch.sql.legacy.query.planner.logical.rule.ProjectionPushDown; import org.opensearch.sql.legacy.query.planner.logical.rule.SelectionPushDown; -/** - * Logical query plan. - */ +/** Logical query plan. */ public class LogicalPlan implements Plan { - /** - * Planner configuration - */ - private final Config config; - - /** - * Parameters - */ - private final QueryParams params; - - /** - * Root node of logical query plan tree - */ - private final LogicalOperator root; - - /** - * Transformation rule - */ - private final List rules = Arrays.asList( - new SelectionPushDown(), //Enforce this run first to simplify Group. Avoid this order dependency in future. - new ProjectionPushDown() - ); - - public LogicalPlan(Config config, QueryParams params) { - this.config = config; - this.params = params; - this.root = buildPlanTree(); - } - - @Override - public void traverse(Visitor visitor) { - root.accept(visitor); - } - - @Override - public void optimize() { - for (LogicalPlanVisitor rule : rules) { - root.accept(rule); - } + /** Planner configuration */ + private final Config config; + + /** Parameters */ + private final QueryParams params; + + /** Root node of logical query plan tree */ + private final LogicalOperator root; + + /** Transformation rule */ + private final List rules = + Arrays.asList( + new SelectionPushDown(), // Enforce this run first to simplify Group. Avoid this order + // dependency in future. 
+ new ProjectionPushDown()); + + public LogicalPlan(Config config, QueryParams params) { + this.config = config; + this.params = params; + this.root = buildPlanTree(); + } + + @Override + public void traverse(Visitor visitor) { + root.accept(visitor); + } + + @Override + public void optimize() { + for (LogicalPlanVisitor rule : rules) { + root.accept(rule); } - - /** - * Build logical plan tree - */ - private LogicalOperator buildPlanTree() { - return project( - top( - sort( - filter( - join( - top( - group(params.firstRequest(), config.scrollPageSize()[0]), - config.tableLimit1() - ), - top( - group(params.secondRequest(), config.scrollPageSize()[1]), - config.tableLimit2() - ) - ) - ) - ), config.totalLimit() - ) - ); - } - - /** - * Create projection operator - */ - private LogicalOperator project(LogicalOperator next) { - Project project = new Project(next); - for (TableInJoinRequestBuilder req : getRequests()) { - if (req.getOriginalSelect().isSelectAll()) { - project.projectAll(req.getAlias()); - } else { - project.project(req.getAlias(), req.getReturnedFields()); - } - } - return project; + } + + /** Build logical plan tree */ + private LogicalOperator buildPlanTree() { + return project( + top( + sort( + filter( + join( + top( + group(params.firstRequest(), config.scrollPageSize()[0]), + config.tableLimit1()), + top( + group(params.secondRequest(), config.scrollPageSize()[1]), + config.tableLimit2())))), + config.totalLimit())); + } + + /** Create projection operator */ + private LogicalOperator project(LogicalOperator next) { + Project project = new Project(next); + for (TableInJoinRequestBuilder req : getRequests()) { + if (req.getOriginalSelect().isSelectAll()) { + project.projectAll(req.getAlias()); + } else { + project.project(req.getAlias(), req.getReturnedFields()); + } } + return project; + } - private LogicalOperator top(LogicalOperator next, int limit) { - if (limit > 0) { - return new Top(next, limit); - } - return next; + private LogicalOperator top(LogicalOperator next, int limit) { + if (limit > 0) { + return new Top(next, limit); } - - private LogicalOperator sort(LogicalOperator next) { - List orderByColNames = new ArrayList<>(); - String orderByType = ""; - for (TableInJoinRequestBuilder request : getRequests()) { - List orderBys = request.getOriginalSelect().getOrderBys(); - if (orderBys != null) { - String tableAlias = request.getAlias() == null ? "" : request.getAlias() + "."; - for (Order orderBy : orderBys) { - orderByColNames.add(tableAlias + orderBy.getName()); - orderByType = orderBy.getType(); - } - } - } - - if (orderByColNames.isEmpty()) { - return next; + return next; + } + + private LogicalOperator sort(LogicalOperator next) { + List orderByColNames = new ArrayList<>(); + String orderByType = ""; + for (TableInJoinRequestBuilder request : getRequests()) { + List orderBys = request.getOriginalSelect().getOrderBys(); + if (orderBys != null) { + String tableAlias = request.getAlias() == null ? 
"" : request.getAlias() + "."; + for (Order orderBy : orderBys) { + orderByColNames.add(tableAlias + orderBy.getName()); + orderByType = orderBy.getType(); } - return new Sort(next, orderByColNames, orderByType); + } } - private LogicalOperator filter(LogicalOperator next) { - Filter filter = new Filter(next, getRequests()); - if (filter.isNoOp()) { - return next; - } - return filter; + if (orderByColNames.isEmpty()) { + return next; } + return new Sort(next, orderByColNames, orderByType); + } - private LogicalOperator join(LogicalOperator left, LogicalOperator right) { - return new Join( - left, right, - params.joinType(), - groupJoinConditionByOr(), - config.blockSize(), - config.isUseTermsFilterOptimization() - ); + private LogicalOperator filter(LogicalOperator next) { + Filter filter = new Filter(next, getRequests()); + if (filter.isNoOp()) { + return next; } - - /** - * Group conditions in ON by OR because it makes hash table group be required too - */ - private JoinCondition groupJoinConditionByOr() { - String leftTableAlias = params.firstRequest().getAlias(); - String rightTableAlias = params.secondRequest().getAlias(); - - JoinCondition orCond; - if (params.joinConditions().isEmpty()) { - orCond = new JoinCondition(leftTableAlias, rightTableAlias, 0); - } else { - orCond = new JoinCondition(leftTableAlias, rightTableAlias, params.joinConditions().size()); - for (int i = 0; i < params.joinConditions().size(); i++) { - List> andCond = params.joinConditions().get(i); - String[] leftColumnNames = new String[andCond.size()]; - String[] rightColumnNames = new String[andCond.size()]; - - for (int j = 0; j < andCond.size(); j++) { - Map.Entry cond = andCond.get(j); - leftColumnNames[j] = cond.getKey().getName(); - rightColumnNames[j] = cond.getValue().getName(); - } - - orCond.addLeftColumnNames(i, leftColumnNames); - orCond.addRightColumnNames(i, rightColumnNames); - } + return filter; + } + + private LogicalOperator join(LogicalOperator left, LogicalOperator right) { + return new Join( + left, + right, + params.joinType(), + groupJoinConditionByOr(), + config.blockSize(), + config.isUseTermsFilterOptimization()); + } + + /** Group conditions in ON by OR because it makes hash table group be required too */ + private JoinCondition groupJoinConditionByOr() { + String leftTableAlias = params.firstRequest().getAlias(); + String rightTableAlias = params.secondRequest().getAlias(); + + JoinCondition orCond; + if (params.joinConditions().isEmpty()) { + orCond = new JoinCondition(leftTableAlias, rightTableAlias, 0); + } else { + orCond = new JoinCondition(leftTableAlias, rightTableAlias, params.joinConditions().size()); + for (int i = 0; i < params.joinConditions().size(); i++) { + List> andCond = params.joinConditions().get(i); + String[] leftColumnNames = new String[andCond.size()]; + String[] rightColumnNames = new String[andCond.size()]; + + for (int j = 0; j < andCond.size(); j++) { + Map.Entry cond = andCond.get(j); + leftColumnNames[j] = cond.getKey().getName(); + rightColumnNames[j] = cond.getValue().getName(); } - return orCond; - } - private LogicalOperator group(TableInJoinRequestBuilder request, int pageSize) { - return new Group(new TableScan(request, pageSize)); + orCond.addLeftColumnNames(i, leftColumnNames); + orCond.addRightColumnNames(i, rightColumnNames); + } } + return orCond; + } - private List getRequests() { - return Arrays.asList(params.firstRequest(), params.secondRequest()); - } + private LogicalOperator group(TableInJoinRequestBuilder request, int pageSize) { + return 
new Group(new TableScan(request, pageSize)); + } - private List map(Collection source, Function func) { - return source.stream().map(func).collect(Collectors.toList()); - } + private List getRequests() { + return Arrays.asList(params.firstRequest(), params.secondRequest()); + } + private List map(Collection source, Function func) { + return source.stream().map(func).collect(Collectors.toList()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java index b779242a09..ef9e1a8d93 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical; import org.opensearch.sql.legacy.query.planner.core.PlanNode; @@ -21,93 +20,86 @@ */ public interface LogicalPlanVisitor extends Visitor { - @Override - default boolean visit(PlanNode op) { - if (op instanceof Project) { - return visit((Project) op); - } else if (op instanceof Filter) { - return visit((Filter) op); - } else if (op instanceof Join) { - return visit((Join) op); - } else if (op instanceof Group) { - return visit((Group) op); - } else if (op instanceof TableScan) { - return visit((TableScan) op); - } else if (op instanceof Top) { - return visit((Top) op); - } else if (op instanceof Sort) { - return visit((Sort) op); - } - throw new IllegalArgumentException("Unknown operator type: " + op); - } - - @Override - default void endVisit(PlanNode op) { - if (op instanceof Project) { - endVisit((Project) op); - } else if (op instanceof Filter) { - endVisit((Filter) op); - } else if (op instanceof Join) { - endVisit((Join) op); - } else if (op instanceof Group) { - endVisit((Group) op); - } else if (op instanceof TableScan) { - endVisit((TableScan) op); - } else if (op instanceof Top) { - endVisit((Top) op); - } else if (op instanceof Sort) { - endVisit((Sort) op); - } else { - throw new IllegalArgumentException("Unknown operator type: " + op); - } - } - - default boolean visit(Project project) { - return true; - } - - default void endVisit(Project project) { - } - - default boolean visit(Filter filter) { - return true; - } - - default void endVisit(Filter filter) { - } - - default boolean visit(Join join) { - return true; - } - - default void endVisit(Join join) { - } - - default boolean visit(Group group) { - return true; - } - - default void endVisit(Group group) { - } - - default boolean visit(TableScan scan) { - return true; - } - - default void endVisit(TableScan scan) { - } - - default boolean visit(Top top) { - return true; - } - - default void endVisit(Top top) { - } - - default boolean visit(Sort sort) { - return true; - } - - default void endVisit(Sort sort) { - } + @Override + default boolean visit(PlanNode op) { + if (op instanceof Project) { + return visit((Project) op); + } else if (op instanceof Filter) { + return visit((Filter) op); + } else if (op instanceof Join) { + return visit((Join) op); + } else if (op instanceof Group) { + return visit((Group) op); + } else if (op instanceof TableScan) { + return visit((TableScan) op); + } else if (op instanceof Top) { + return visit((Top) op); + } else if (op instanceof Sort) { + return visit((Sort) op); + } + throw new IllegalArgumentException("Unknown operator type: " + op); + } + + 
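// --------------------------------------------------------------------------
// Editor's illustration (not part of this change): the default visit/endVisit
// methods in LogicalPlanVisitor dispatch on the concrete PlanNode subtype, so
// a concrete visitor only overrides the hooks it needs. A minimal sketch
// follows; the class name TableScanCollector is hypothetical, and it assumes
// the plan is walked via the usual accept(visitor) traversal and that the
// inherited Visitor interface declares no further abstract methods.
class TableScanCollector implements LogicalPlanVisitor {

  // Collected scan nodes, in traversal order.
  private final java.util.List<TableScan> scans = new java.util.ArrayList<>();

  @Override
  public boolean visit(TableScan scan) {
    scans.add(scan); // record each table scan encountered
    return true; // continue traversal (a scan has no children anyway)
  }

  java.util.List<TableScan> collectedScans() {
    return scans;
  }
}
// --------------------------------------------------------------------------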
@Override + default void endVisit(PlanNode op) { + if (op instanceof Project) { + endVisit((Project) op); + } else if (op instanceof Filter) { + endVisit((Filter) op); + } else if (op instanceof Join) { + endVisit((Join) op); + } else if (op instanceof Group) { + endVisit((Group) op); + } else if (op instanceof TableScan) { + endVisit((TableScan) op); + } else if (op instanceof Top) { + endVisit((Top) op); + } else if (op instanceof Sort) { + endVisit((Sort) op); + } else { + throw new IllegalArgumentException("Unknown operator type: " + op); + } + } + + default boolean visit(Project project) { + return true; + } + + default void endVisit(Project project) {} + + default boolean visit(Filter filter) { + return true; + } + + default void endVisit(Filter filter) {} + + default boolean visit(Join join) { + return true; + } + + default void endVisit(Join join) {} + + default boolean visit(Group group) { + return true; + } + + default void endVisit(Group group) {} + + default boolean visit(TableScan scan) { + return true; + } + + default void endVisit(TableScan scan) {} + + default boolean visit(Top top) { + return true; + } + + default void endVisit(Top top) {} + + default boolean visit(Sort sort) { + return true; + } + + default void endVisit(Sort sort) {} } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java index f5e3e40f2d..5d4423d67a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.HashMap; @@ -16,59 +15,54 @@ import org.opensearch.sql.legacy.query.planner.logical.LogicalOperator; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Selection expression - */ +/** Selection expression */ public class Filter implements LogicalOperator { - private final LogicalOperator next; + private final LogicalOperator next; - /** - * Alias to WHERE clause mapping - */ - private final Map aliasWhereMap = new HashMap<>(); + /** Alias to WHERE clause mapping */ + private final Map aliasWhereMap = new HashMap<>(); - public Filter(LogicalOperator next, List tables) { - this.next = next; - - for (TableInJoinRequestBuilder table : tables) { - Select select = table.getOriginalSelect(); - if (select.getWhere() != null) { - aliasWhereMap.put(table.getAlias(), select.getWhere()); - } - } - } + public Filter(LogicalOperator next, List tables) { + this.next = next; - public Filter(LogicalOperator next) { - this.next = next; + for (TableInJoinRequestBuilder table : tables) { + Select select = table.getOriginalSelect(); + if (select.getWhere() != null) { + aliasWhereMap.put(table.getAlias(), select.getWhere()); + } } + } - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + public Filter(LogicalOperator next) { + this.next = next; + } - @Override - public boolean isNoOp() { - return aliasWhereMap.isEmpty(); - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - // Always no-op after push down, skip it by returning next - return new PhysicalOperator[]{optimalOps.get(next)}; - } + @Override + public boolean isNoOp() { + return aliasWhereMap.isEmpty(); + } - 
public void pushDown(String tableAlias, Filter pushedDownFilter) { - Where pushedDownWhere = pushedDownFilter.aliasWhereMap.remove(tableAlias); - if (pushedDownWhere != null) { - aliasWhereMap.put(tableAlias, pushedDownWhere); - } - } + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + // Always no-op after push down, skip it by returning next + return new PhysicalOperator[] {optimalOps.get(next)}; + } - @Override - public String toString() { - return "Filter [ conditions=" + aliasWhereMap.values() + " ]"; + public void pushDown(String tableAlias, Filter pushedDownFilter) { + Where pushedDownWhere = pushedDownFilter.aliasWhereMap.remove(tableAlias); + if (pushedDownWhere != null) { + aliasWhereMap.put(tableAlias, pushedDownWhere); } + } + @Override + public String toString() { + return "Filter [ conditions=" + aliasWhereMap.values() + " ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java index 5ae9ddc0a2..da94ae74da 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.Map; @@ -11,66 +10,57 @@ import org.opensearch.sql.legacy.query.planner.logical.LogicalOperator; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Project-Filter-TableScan group for push down optimization convenience. - */ +/** Project-Filter-TableScan group for push down optimization convenience. */ public class Group implements LogicalOperator { - /** - * Optional pushed down projection - */ - private Project project; - - /** - * Optional pushed down filter (selection) - */ - private Filter filter; + /** Optional pushed down projection */ + private Project project; - /** - * Required table scan operator - */ - private final TableScan tableScan; + /** Optional pushed down filter (selection) */ + private Filter filter; + /** Required table scan operator */ + private final TableScan tableScan; - public Group(TableScan tableScan) { - this.tableScan = tableScan; - this.filter = new Filter(tableScan); - this.project = new Project<>(filter); - } + public Group(TableScan tableScan) { + this.tableScan = tableScan; + this.filter = new Filter(tableScan); + this.project = new Project<>(filter); + } - @Override - public boolean isNoOp() { - return true; - } + @Override + public boolean isNoOp() { + return true; + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - return tableScan.toPhysical(optimalOps); - } + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + return tableScan.toPhysical(optimalOps); + } - @Override - public PlanNode[] children() { - return new PlanNode[]{topNonNullNode()}; - } + @Override + public PlanNode[] children() { + return new PlanNode[] {topNonNullNode()}; + } - private PlanNode topNonNullNode() { - return project != null ? project : (filter != null ? filter : tableScan); - } + private PlanNode topNonNullNode() { + return project != null ? project : (filter != null ? 
filter : tableScan); + } - public String id() { - return tableScan.getTableAlias(); - } + public String id() { + return tableScan.getTableAlias(); + } - public void pushDown(Project project) { - this.project.pushDown(id(), project); - } + public void pushDown(Project project) { + this.project.pushDown(id(), project); + } - public void pushDown(Filter filter) { - this.filter.pushDown(id(), filter); - } + public void pushDown(Filter filter) { + this.filter.pushDown(id(), filter); + } - @Override - public String toString() { - return "Group"; - } + @Override + public String toString() { + return "Group"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java index ae833ca580..405a8a9f72 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -15,76 +14,65 @@ import org.opensearch.sql.legacy.query.planner.physical.node.join.BlockHashJoin; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize; -/** - * Join expression - */ +/** Join expression */ public class Join implements LogicalOperator { - private final LogicalOperator left; - private final LogicalOperator right; - - /** - * Join type, ex inner join, left join - */ - private final JoinType type; - - /** - * Joined columns in ON condition - */ - private final JoinCondition condition; - - /** - * Block size calculator - */ - private final BlockSize blockSize; - - /** - * Use terms filter optimization or not - */ - private final boolean isUseTermsFilterOptimization; - - - public Join(LogicalOperator left, - LogicalOperator right, - JoinType joinType, - JoinCondition condition, - BlockSize blockSize, - boolean isUseTermsFilterOptimization) { - this.left = left; - this.right = right; - this.type = joinType; - this.condition = condition; - this.blockSize = blockSize; - this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; - } - - @Override - public PlanNode[] children() { - return new PlanNode[]{left, right}; - } - - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - PhysicalOperator optimalLeft = optimalOps.get(left); - PhysicalOperator optimalRight = optimalOps.get(right); - return new PhysicalOperator[]{ - new BlockHashJoin<>( - optimalLeft, optimalRight, type, condition, - blockSize, isUseTermsFilterOptimization - ) - }; - } - - public JoinCondition conditions() { - return condition; - } - - @Override - public String toString() { - return "Join [ conditions=" + condition + " type=" + type + " ]"; - } + private final LogicalOperator left; + private final LogicalOperator right; + + /** Join type, ex inner join, left join */ + private final JoinType type; + + /** Joined columns in ON condition */ + private final JoinCondition condition; + + /** Block size calculator */ + private final BlockSize blockSize; + + /** Use terms filter optimization or not */ + private final boolean isUseTermsFilterOptimization; + + public Join( + LogicalOperator left, + LogicalOperator right, + JoinType joinType, + JoinCondition condition, + BlockSize blockSize, + boolean isUseTermsFilterOptimization) { + this.left = left; + this.right = right; + this.type 
= joinType; + this.condition = condition; + this.blockSize = blockSize; + this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; + } + + @Override + public PlanNode[] children() { + return new PlanNode[] {left, right}; + } + + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + PhysicalOperator optimalLeft = optimalOps.get(left); + PhysicalOperator optimalRight = optimalOps.get(right); + return new PhysicalOperator[] { + new BlockHashJoin<>( + optimalLeft, optimalRight, type, condition, blockSize, isUseTermsFilterOptimization) + }; + } + + public JoinCondition conditions() { + return condition; + } + + @Override + public String toString() { + return "Join [ conditions=" + condition + " type=" + type + " ]"; + } /** + *

      * Join condition in ON clause grouped by OR.
      * 

 * For example, "ON (a.name = b.id AND a.age = b.age) OR a.location = b.address"
@@ -97,80 +85,77 @@ public String toString() {
 * leftTableAlias: "a", rightTableAlias: "b"
 * leftColumnNames: [ ["name", "age"], ["location"] ]
 * rightColumnNames: [ ["id", "age"], ["address" ] ]
+ *

*/ public static class JoinCondition { - private final String leftTableAlias; - private final String rightTableAlias; + private final String leftTableAlias; + private final String rightTableAlias; - private final String[][] leftColumnNames; - private final String[][] rightColumnNames; + private final String[][] leftColumnNames; + private final String[][] rightColumnNames; - public JoinCondition(String leftTableAlias, - String rightTableAlias, - int groupSize) { - this.leftTableAlias = leftTableAlias; - this.rightTableAlias = rightTableAlias; - this.leftColumnNames = new String[groupSize][]; - this.rightColumnNames = new String[groupSize][]; - } + public JoinCondition(String leftTableAlias, String rightTableAlias, int groupSize) { + this.leftTableAlias = leftTableAlias; + this.rightTableAlias = rightTableAlias; + this.leftColumnNames = new String[groupSize][]; + this.rightColumnNames = new String[groupSize][]; + } - public void addLeftColumnNames(int groupNum, String[] colNames) { - leftColumnNames[groupNum] = colNames; - } + public void addLeftColumnNames(int groupNum, String[] colNames) { + leftColumnNames[groupNum] = colNames; + } - public void addRightColumnNames(int groupNum, String[] colNames) { - rightColumnNames[groupNum] = colNames; - } + public void addRightColumnNames(int groupNum, String[] colNames) { + rightColumnNames[groupNum] = colNames; + } - public int groupSize() { - return leftColumnNames.length; - } + public int groupSize() { + return leftColumnNames.length; + } - public String leftTableAlias() { - return leftTableAlias; - } + public String leftTableAlias() { + return leftTableAlias; + } - public String rightTableAlias() { - return rightTableAlias; - } + public String rightTableAlias() { + return rightTableAlias; + } - public String[] leftColumnNames(int groupNum) { - return leftColumnNames[groupNum]; - } + public String[] leftColumnNames(int groupNum) { + return leftColumnNames[groupNum]; + } - public String[] rightColumnNames(int groupNum) { - return rightColumnNames[groupNum]; - } + public String[] rightColumnNames(int groupNum) { + return rightColumnNames[groupNum]; + } - @Override - public String toString() { - StringBuilder str = new StringBuilder(); - int groupSize = leftColumnNames.length; - for (int i = 0; i < groupSize; i++) { - if (i > 0) { - str.append(" OR "); - } - - str.append("( "); - int condSize = leftColumnNames[i].length; - for (int j = 0; j < condSize; j++) { - if (j > 0) { - str.append(" AND "); - } - str.append(leftTableAlias). - append("."). - append(leftColumnNames[i][j]). - append(" = "). - append(rightTableAlias). - append("."). 
- append(rightColumnNames[i][j]); - } - str.append(" )"); - } - return str.toString(); + @Override + public String toString() { + StringBuilder str = new StringBuilder(); + int groupSize = leftColumnNames.length; + for (int i = 0; i < groupSize; i++) { + if (i > 0) { + str.append(" OR "); } + str.append("( "); + int condSize = leftColumnNames[i].length; + for (int j = 0; j < condSize; j++) { + if (j > 0) { + str.append(" AND "); + } + str.append(leftTableAlias) + .append(".") + .append(leftColumnNames[i][j]) + .append(" = ") + .append(rightTableAlias) + .append(".") + .append(rightColumnNames[i][j]); + } + str.append(" )"); + } + return str.toString(); } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java index 1648cf854d..72ffbd4652 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.estimation; import static java.util.Comparator.comparing; @@ -18,39 +17,35 @@ import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; /** - * Convert and estimate the cost of each operator and generate one optimal plan. - * Memorize cost of candidate physical operators in the bottom-up way to avoid duplicate computation. + * Convert and estimate the cost of each operator and generate one optimal plan. Memorize cost of + * candidate physical operators in the bottom-up way to avoid duplicate computation. */ public class Estimation implements LogicalPlanVisitor { - /** - * Optimal physical operator for logical operator based on completed estimation - */ - private Map> optimalOps = new IdentityHashMap<>(); - - /** - * Keep tracking of the operator that exit visit() - */ - private PhysicalOperator root; - - @Override - public boolean visit(Group group) { - return false; - } - - @SuppressWarnings("unchecked") - @Override - public void endVisit(PlanNode node) { - LogicalOperator op = (LogicalOperator) node; - PhysicalOperator optimal = Arrays.stream(op.toPhysical(optimalOps)). - min(comparing(PhysicalOperator::estimate)). 
- orElseThrow(() -> new IllegalStateException( - "No optimal operator found: " + op)); - optimalOps.put(op, optimal); - root = optimal; - } - - public PhysicalOperator optimalPlan() { - return root; - } + /** Optimal physical operator for logical operator based on completed estimation */ + private Map> optimalOps = new IdentityHashMap<>(); + + /** Keep tracking of the operator that exit visit() */ + private PhysicalOperator root; + + @Override + public boolean visit(Group group) { + return false; + } + + @SuppressWarnings("unchecked") + @Override + public void endVisit(PlanNode node) { + LogicalOperator op = (LogicalOperator) node; + PhysicalOperator optimal = + Arrays.stream(op.toPhysical(optimalOps)) + .min(comparing(PhysicalOperator::estimate)) + .orElseThrow(() -> new IllegalStateException("No optimal operator found: " + op)); + optimalOps.put(op, optimal); + root = optimal; + } + + public PhysicalOperator optimalPlan() { + return root; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java index 4a20b1833b..1811af5158 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import java.util.Collection; @@ -17,50 +16,42 @@ */ public interface HashTable { - /** - * Add one row to the hash table - * - * @param row row - */ - void add(Row row); - - - /** - * Find all matched row(s) in the hash table. - * - * @param row row to be matched - * @return all matches - */ - Collection> match(Row row); - - - /** - * Mapping from right field to value(s) of left size - * - * @return - */ - Map>[] rightFieldWithLeftValues(); - - - /** - * Get size of hash table - * - * @return size of hash table - */ - int size(); - - - /** - * Is hash table empty? - * - * @return true for yes - */ - boolean isEmpty(); - - - /** - * Clear internal data structure - */ - void clear(); - + /** + * Add one row to the hash table + * + * @param row row + */ + void add(Row row); + + /** + * Find all matched row(s) in the hash table. + * + * @param row row to be matched + * @return all matches + */ + Collection> match(Row row); + + /** + * Mapping from right field to value(s) of left size + * + * @return + */ + Map>[] rightFieldWithLeftValues(); + + /** + * Get size of hash table + * + * @return size of hash table + */ + int size(); + + /** + * Is hash table empty? 
+ * + * @return true for yes + */ + boolean isEmpty(); + + /** Clear internal data structure */ + void clear(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java index c22eb9dc19..08867f8c5d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static org.opensearch.sql.legacy.query.planner.logical.node.Join.JoinCondition; @@ -15,75 +14,69 @@ import java.util.Set; import org.opensearch.sql.legacy.query.planner.physical.Row; -/** - * Hash table group with each hash table per AND join condition. - */ +/** Hash table group with each hash table per AND join condition. */ public class HashTableGroup implements HashTable { - private final HashTable[] hashTables; + private final HashTable[] hashTables; - /** - * Number of rows stored in the hash table (in other words, = block size) - */ - private int numOfRows = 0; + /** Number of rows stored in the hash table (in other words, = block size) */ + private int numOfRows = 0; - @SuppressWarnings("unchecked") - public HashTableGroup(JoinCondition condition) { - int groupSize = condition.groupSize(); - if (groupSize == 0) { - // Create one hash table (degraded to list) for Cross Join - hashTables = new HashTable[]{new ListHashTable()}; - } else { - hashTables = new HashTable[groupSize]; - for (int i = 0; i < groupSize; i++) { - hashTables[i] = new DefaultHashTable<>( - condition.leftColumnNames(i), - condition.rightColumnNames(i) - ); - } - } + @SuppressWarnings("unchecked") + public HashTableGroup(JoinCondition condition) { + int groupSize = condition.groupSize(); + if (groupSize == 0) { + // Create one hash table (degraded to list) for Cross Join + hashTables = new HashTable[] {new ListHashTable()}; + } else { + hashTables = new HashTable[groupSize]; + for (int i = 0; i < groupSize; i++) { + hashTables[i] = + new DefaultHashTable<>(condition.leftColumnNames(i), condition.rightColumnNames(i)); + } } + } - @Override - public void add(Row row) { - for (HashTable hashTable : hashTables) { - hashTable.add(row); - } - numOfRows++; + @Override + public void add(Row row) { + for (HashTable hashTable : hashTables) { + hashTable.add(row); } + numOfRows++; + } - @Override - public Collection> match(Row row) { - Set> allMatched = Sets.newIdentityHashSet(); - for (HashTable hashTable : hashTables) { - allMatched.addAll(hashTable.match(row)); - } - return allMatched; + @Override + public Collection> match(Row row) { + Set> allMatched = Sets.newIdentityHashSet(); + for (HashTable hashTable : hashTables) { + allMatched.addAll(hashTable.match(row)); } + return allMatched; + } - @SuppressWarnings("unchecked") - public Map>[] rightFieldWithLeftValues() { - return Arrays.stream(hashTables). - map(hashTable -> hashTable.rightFieldWithLeftValues()[0]). 
// Make interface consistent - toArray(Map[]::new); - } + @SuppressWarnings("unchecked") + public Map>[] rightFieldWithLeftValues() { + return Arrays.stream(hashTables) + // Make interface consistent + .map(hashTable -> hashTable.rightFieldWithLeftValues()[0]) + .toArray(Map[]::new); + } - @Override - public boolean isEmpty() { - return numOfRows == 0; - } + @Override + public boolean isEmpty() { + return numOfRows == 0; + } - @Override - public int size() { - return numOfRows; - } + @Override + public int size() { + return numOfRows; + } - @Override - public void clear() { - for (HashTable hashTable : hashTables) { - hashTable.clear(); - } - numOfRows = 0; + @Override + public void clear() { + for (HashTable hashTable : hashTables) { + hashTable.clear(); } - + numOfRows = 0; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java index 07f008bea4..9fcb977beb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static java.util.Collections.emptyList; @@ -31,253 +30,232 @@ */ public abstract class JoinAlgorithm extends BatchPhysicalOperator { - protected static final Logger LOG = LogManager.getLogger(); - - /** - * Left child operator - */ - private final PhysicalOperator left; - - /** - * Right child operator handled by concrete join algorithm subclass - */ - protected final PhysicalOperator right; - - /** - * Join type ex. inner join, left join - */ - private final JoinType type; - - /** - * Joined columns in ON conditions - */ - private final JoinCondition condition; - - /** - * Block size calculator - */ - private final BlockSize blockSize; - - /** - * Bookkeeping unmatched rows in current block from left - */ - private final Set> leftMismatch; - - /** - * Hash table for right table probing - */ - protected HashTable hashTable; - - /** - * Execute params to reset right side for each left block - */ - protected ExecuteParams params; - - JoinAlgorithm(PhysicalOperator left, - PhysicalOperator right, - JoinType type, - JoinCondition condition, - BlockSize blockSize) { - this.left = left; - this.right = right; - this.type = type; - this.condition = condition; - this.blockSize = blockSize; - this.hashTable = new HashTableGroup<>(condition); - this.leftMismatch = Sets.newIdentityHashSet(); - } - - @Override - public PlanNode[] children() { - return new PlanNode[]{left, right}; - } - - @Override - public void open(ExecuteParams params) throws Exception { - super.open(params); - left.open(params); - this.params = params; - } - - @Override - public void close() { - super.close(); - hashTable.clear(); - leftMismatch.clear(); - LOG.debug("Cleared all resources used by join"); - } + protected static final Logger LOG = LogManager.getLogger(); + + /** Left child operator */ + private final PhysicalOperator left; + + /** Right child operator handled by concrete join algorithm subclass */ + protected final PhysicalOperator right; + + /** Join type ex. 
inner join, left join */ + private final JoinType type; + + /** Joined columns in ON conditions */ + private final JoinCondition condition; + + /** Block size calculator */ + private final BlockSize blockSize; + + /** Bookkeeping unmatched rows in current block from left */ + private final Set> leftMismatch; + + /** Hash table for right table probing */ + protected HashTable hashTable; + + /** Execute params to reset right side for each left block */ + protected ExecuteParams params; + + JoinAlgorithm( + PhysicalOperator left, + PhysicalOperator right, + JoinType type, + JoinCondition condition, + BlockSize blockSize) { + this.left = left; + this.right = right; + this.type = type; + this.condition = condition; + this.blockSize = blockSize; + this.hashTable = new HashTableGroup<>(condition); + this.leftMismatch = Sets.newIdentityHashSet(); + } + + @Override + public PlanNode[] children() { + return new PlanNode[] {left, right}; + } + + @Override + public void open(ExecuteParams params) throws Exception { + super.open(params); + left.open(params); + this.params = params; + } + + @Override + public void close() { + super.close(); + hashTable.clear(); + leftMismatch.clear(); + LOG.debug("Cleared all resources used by join"); + } /** * Build-probe left and right block by block to prefetch next matches (and mismatches if outer join). - *

- * 1) Build hash table and open right side.
- * 2) Keep probing right to find matched rows (meanwhile update mismatched set)
- * 3) Check if any row in mismatched set to return in the case of outer join.
- * 4) Nothing remained now, move on to next block of left. Go back to step 1.
- *
+ *
+ *   1. Build hash table and open right side.
+ *   2. Keep probing right to find matched rows (meanwhile update mismatched set)
+ *   3. Check if any row in mismatched set to return in the case of outer join.
+ *   4. Nothing remained now, move on to next block of left. Go back to step 1.
+ *
* This is a new run AND no block from left means algorithm should stop and return empty. */ @Override protected Collection> prefetch() throws Exception { while (!isNewRunButNoMoreBlockFromLeft()) { - // 1.Build hash table and (re-)open right side for the new run - if (isNewRun()) { - buildHashTableByNextBlock(); - reopenRight(); - } - - // 2.Keep probing right by the hash table and bookkeeping mismatch - while (isAnyMoreDataFromRight()) { - Collection> matched = probeMatchAndBookkeepMismatch(); - if (!matched.isEmpty()) { - return matched; - } - } - - // 3.You know it's a mismatch only after this run finished (left block + all right). - if (isAnyMismatchForOuterJoin()) { - return returnAndClearMismatch(); - } - - // 4.Clean up and close right - cleanUpAndCloseRight(); - } - return emptyList(); - } - - /** - * Probe right by hash table built from left. Handle matched and mismatched rows. - */ - private Collection> probeMatchAndBookkeepMismatch() { - if (hashTable.isEmpty()) { - throw new IllegalStateException("Hash table is NOT supposed to be empty"); + // 1.Build hash table and (re-)open right side for the new run + if (isNewRun()) { + buildHashTableByNextBlock(); + reopenRight(); + } + + // 2.Keep probing right by the hash table and bookkeeping mismatch + while (isAnyMoreDataFromRight()) { + Collection> matched = probeMatchAndBookkeepMismatch(); + if (!matched.isEmpty()) { + return matched; } + } - List> combinedRows = probe(); + // 3.You know it's a mismatch only after this run finished (left block + all right). + if (isAnyMismatchForOuterJoin()) { + return returnAndClearMismatch(); + } - List> matchRows = new ArrayList<>(); - if (combinedRows.isEmpty()) { - LOG.debug("No matched row found"); - } else { - if (LOG.isTraceEnabled()) { - combinedRows.forEach(row -> LOG.trace("Matched row before combined: {}", row)); - } + // 4.Clean up and close right + cleanUpAndCloseRight(); + } + return emptyList(); + } - for (CombinedRow row : combinedRows) { - matchRows.addAll(row.combine()); - } + /** Probe right by hash table built from left. Handle matched and mismatched rows. 
*/ + private Collection> probeMatchAndBookkeepMismatch() { + if (hashTable.isEmpty()) { + throw new IllegalStateException("Hash table is NOT supposed to be empty"); + } - if (LOG.isTraceEnabled()) { - matchRows.forEach(row -> LOG.trace("Matched row after combined: {}", row)); - } + List> combinedRows = probe(); - bookkeepMismatchedRows(combinedRows); - } - return matchRows; - } + List> matchRows = new ArrayList<>(); + if (combinedRows.isEmpty()) { + LOG.debug("No matched row found"); + } else { + if (LOG.isTraceEnabled()) { + combinedRows.forEach(row -> LOG.trace("Matched row before combined: {}", row)); + } - private boolean isNewRunButNoMoreBlockFromLeft() { - return isNewRun() && !isAnyMoreBlockFromLeft(); - } + for (CombinedRow row : combinedRows) { + matchRows.addAll(row.combine()); + } - private boolean isNewRun() { - return hashTable.isEmpty(); - } + if (LOG.isTraceEnabled()) { + matchRows.forEach(row -> LOG.trace("Matched row after combined: {}", row)); + } - private boolean isAnyMoreBlockFromLeft() { - return left.hasNext(); + bookkeepMismatchedRows(combinedRows); } - - private boolean isAnyMoreDataFromRight() { - return right.hasNext(); + return matchRows; + } + + private boolean isNewRunButNoMoreBlockFromLeft() { + return isNewRun() && !isAnyMoreBlockFromLeft(); + } + + private boolean isNewRun() { + return hashTable.isEmpty(); + } + + private boolean isAnyMoreBlockFromLeft() { + return left.hasNext(); + } + + private boolean isAnyMoreDataFromRight() { + return right.hasNext(); + } + + private boolean isAnyMismatchForOuterJoin() { + return !leftMismatch.isEmpty(); + } + + /** Clone mismatch list and clear it so that we won't return it forever */ + @SuppressWarnings("unchecked") + private Collection> returnAndClearMismatch() { + if (LOG.isTraceEnabled()) { + leftMismatch.forEach(row -> LOG.trace("Mismatched rows before combined: {}", row)); } - private boolean isAnyMismatchForOuterJoin() { - return !leftMismatch.isEmpty(); + List> result = new ArrayList<>(); + for (Row row : leftMismatch) { + result.add(row.combine(Row.NULL)); } - /** - * Clone mismatch list and clear it so that we won't return it forever - */ - @SuppressWarnings("unchecked") - private Collection> returnAndClearMismatch() { - if (LOG.isTraceEnabled()) { - leftMismatch.forEach(row -> LOG.trace("Mismatched rows before combined: {}", row)); - } - - List> result = new ArrayList<>(); - for (Row row : leftMismatch) { - result.add(row.combine(Row.NULL)); - } - - if (LOG.isTraceEnabled()) { - result.forEach(row -> LOG.trace("Mismatched rows after combined: {}", row)); - } - leftMismatch.clear(); - return result; + if (LOG.isTraceEnabled()) { + result.forEach(row -> LOG.trace("Mismatched rows after combined: {}", row)); } - - /** - * Building phase: - * Build hash table from data block. - */ - private void buildHashTableByNextBlock() { - List> block = loadNextBlockFromLeft(blockSize.size()); - if (LOG.isTraceEnabled()) { - LOG.trace("Build hash table on conditions with block: {}, {}", condition, block); - } - - for (Row data : block) { - hashTable.add(data); - } - - if (type == JoinType.LEFT_OUTER_JOIN) { - leftMismatch.addAll(block); - } + leftMismatch.clear(); + return result; + } + + /** Building phase: Build hash table from data block. 
*/ + private void buildHashTableByNextBlock() { + List> block = loadNextBlockFromLeft(blockSize.size()); + if (LOG.isTraceEnabled()) { + LOG.trace("Build hash table on conditions with block: {}, {}", condition, block); } - private void cleanUpAndCloseRight() { - LOG.debug("No more data from right. Clean up and close right."); - hashTable.clear(); - leftMismatch.clear(); - right.close(); + for (Row data : block) { + hashTable.add(data); } - private List> loadNextBlockFromLeft(int blockSize) { - List> block = new ArrayList<>(); - for (int i = 0; i < blockSize && left.hasNext(); i++) { - block.add(left.next()); - } - return block; + if (type == JoinType.LEFT_OUTER_JOIN) { + leftMismatch.addAll(block); } - - private void bookkeepMismatchedRows(List> combinedRows) { - if (type == JoinType.LEFT_OUTER_JOIN) { - for (CombinedRow row : combinedRows) { - leftMismatch.removeAll(row.leftMatchedRows()); - } - } + } + + private void cleanUpAndCloseRight() { + LOG.debug("No more data from right. Clean up and close right."); + hashTable.clear(); + leftMismatch.clear(); + right.close(); + } + + private List> loadNextBlockFromLeft(int blockSize) { + List> block = new ArrayList<>(); + for (int i = 0; i < blockSize && left.hasNext(); i++) { + block.add(left.next()); } - - /** - * (Re-)open right side by params. - */ - protected abstract void reopenRight() throws Exception; - - - /** - * Probing phase - * - * @return matched rows from left and right in - */ - protected abstract List> probe(); - - - @Override - public String toString() { - return getClass().getSimpleName() + "[ conditions=" + condition - + ", type=" + type + ", blockSize=[" + blockSize + "] ]"; + return block; + } + + private void bookkeepMismatchedRows(List> combinedRows) { + if (type == JoinType.LEFT_OUTER_JOIN) { + for (CombinedRow row : combinedRows) { + leftMismatch.removeAll(row.leftMatchedRows()); + } } - + } + + /** (Re-)open right side by params. */ + protected abstract void reopenRight() throws Exception; + + /** + * Probing phase + * + * @return matched rows from left and right in + */ + protected abstract List> probe(); + + @Override + public String toString() { + return getClass().getSimpleName() + + "[ conditions=" + + condition + + ", type=" + + type + + ", blockSize=[" + + blockSize + + "] ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java index 5d39529632..baf0af8c86 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import java.util.ArrayList; @@ -13,41 +12,39 @@ import java.util.Map; import org.opensearch.sql.legacy.query.planner.physical.Row; -/** - * List implementation to avoid normal hash table degrading into linked list. - */ +/** List implementation to avoid normal hash table degrading into linked list. 
*/ public class ListHashTable implements HashTable { - private List> rows = new ArrayList<>(); - - @Override - public void add(Row row) { - rows.add(row); - } - - @Override - public Collection> match(Row row) { - return rows; - } - - @SuppressWarnings("unchecked") - @Override - public Map>[] rightFieldWithLeftValues() { - return new Map[]{new HashMap()}; - } - - @Override - public int size() { - return rows.size(); - } - - @Override - public boolean isEmpty() { - return rows.isEmpty(); - } - - @Override - public void clear() { - rows.clear(); - } + private List> rows = new ArrayList<>(); + + @Override + public void add(Row row) { + rows.add(row); + } + + @Override + public Collection> match(Row row) { + return rows; + } + + @SuppressWarnings("unchecked") + @Override + public Map>[] rightFieldWithLeftValues() { + return new Map[] {new HashMap()}; + } + + @Override + public int size() { + return rows.size(); + } + + @Override + public boolean isEmpty() { + return rows.isEmpty(); + } + + @Override + public void clear() { + rows.clear(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java index 7990b8c8d4..339e326cc3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java @@ -3,28 +3,25 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.blocksize; -/** - * Adaptive block size calculator based on resource usage dynamically. - */ +/** Adaptive block size calculator based on resource usage dynamically. */ public class AdaptiveBlockSize implements BlockSize { - private int upperLimit; + private int upperLimit; - public AdaptiveBlockSize(int upperLimit) { - this.upperLimit = upperLimit; - } + public AdaptiveBlockSize(int upperLimit) { + this.upperLimit = upperLimit; + } - @Override - public int size() { - //TODO: calculate dynamically on each call - return upperLimit; - } + @Override + public int size() { + // TODO: calculate dynamically on each call + return upperLimit; + } - @Override - public String toString() { - return "AdaptiveBlockSize with " + "upperLimit=" + upperLimit; - } + @Override + public String toString() { + return "AdaptiveBlockSize with " + "upperLimit=" + upperLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java index 10b36f2483..52bc42587f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java @@ -3,19 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.monitor; -/** - * Interface for different monitor component - */ +/** Interface for different monitor component */ public interface Monitor { - /** - * Is resource being monitored exhausted. - * - * @return true if yes - */ - boolean isHealthy(); - + /** + * Is resource being monitored exhausted. 
+ * + * @return true if yes + */ + boolean isHealthy(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java index 6c708b91b0..9863862af9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java @@ -3,42 +3,39 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; -/** - * Util class for identifier expression parsing - */ +/** Util class for identifier expression parsing */ class Identifier { - private final SQLIdentifierExpr idExpr; + private final SQLIdentifierExpr idExpr; - Identifier(SQLIdentifierExpr idExpr) { - this.idExpr = idExpr; - } + Identifier(SQLIdentifierExpr idExpr) { + this.idExpr = idExpr; + } - String name() { - return idExpr.getName(); - } + String name() { + return idExpr.getName(); + } - boolean hasPrefix() { - return firstDotIndex() != -1; - } + boolean hasPrefix() { + return firstDotIndex() != -1; + } - /** Assumption: identifier has prefix */ - String prefix() { - return name().substring(0, firstDotIndex()); - } + /** Assumption: identifier has prefix */ + String prefix() { + return name().substring(0, firstDotIndex()); + } - /** Assumption: identifier has prefix */ - void removePrefix() { - String nameWithoutPrefix = name().substring(prefix().length() + 1); - idExpr.setName(nameWithoutPrefix); - } + /** Assumption: identifier has prefix */ + void removePrefix() { + String nameWithoutPrefix = name().substring(prefix().length() + 1); + idExpr.setName(nameWithoutPrefix); + } - private int firstDotIndex() { - return name().indexOf('.', 1); - } + private int firstDotIndex() { + return name().indexOf('.', 1); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java index b32803561e..69178b7e83 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.join; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; @@ -28,23 +27,21 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** + *
  *  Rewrite rule to add table alias to columnNames for JOIN queries without table alias.
  * 

 * We use a map from columnName to tableName. This is required to remove any ambiguity
 * while mapping fields to right table. If there is no explicit alias we create one and use that
 * to prefix columnName.
 *
- * Different tableName on either side of join:
- * Case a: If columnName(without alias) present in both tables, throw error.
- * Case b: If columnName already has some alias, and that alias is a table name,
- * change it to explicit alias of that table.
- * Case c: If columnName is unique to a table
+ *
+ * Different tableName on either side of join: Case a: If columnName(without alias) present in
+ * both tables, throw error. Case b: If columnName already has some alias, and that alias is a table
+ * name, change it to explicit alias of that table. Case c: If columnName is unique to a table
+ *
+ * Same tableName on either side of join: Case a: If neither has explicit alias, throw error.
+ * Case b: If any one table has explicit alias, use explicit alias of other table for columnNames
+ * with tableName as prefix. (See below example)
 *
- * Same tableName on either side of join:
- * Case a: If neither has explicit alias, throw error.
- * Case b: If any one table has explicit alias,
- * use explicit alias of other table for columnNames with tableName as prefix. (See below example)
- *

  *       ex: SELECT table.field_a , a.field_b  | SELECT table.field_a , a.field_b
  *            FROM table a                     |  FROM table
  *             JOIN table                      |   JOIN table a
@@ -54,164 +51,172 @@
  *                            FROM table a
  *                             JOIN table table_0
  *                              ON table_0.field_c = a.field_d
- *
- *

+ *
*/ public class JoinRewriteRule implements RewriteRule { - private static final String DOT = "."; - private int aliasSuffix = 0; - private final LocalClusterState clusterState; + private static final String DOT = "."; + private int aliasSuffix = 0; + private final LocalClusterState clusterState; - public JoinRewriteRule(LocalClusterState clusterState) { - this.clusterState = clusterState; - } + public JoinRewriteRule(LocalClusterState clusterState) { + this.clusterState = clusterState; + } - @Override - public boolean match(SQLQueryExpr root) { - return isJoin(root); - } - - private boolean isJoin(SQLQueryExpr sqlExpr) { - SQLSelectQuery sqlSelectQuery = sqlExpr.getSubQuery().getQuery(); + @Override + public boolean match(SQLQueryExpr root) { + return isJoin(root); + } - if (!(sqlSelectQuery instanceof MySqlSelectQueryBlock)) { - return false; - } + private boolean isJoin(SQLQueryExpr sqlExpr) { + SQLSelectQuery sqlSelectQuery = sqlExpr.getSubQuery().getQuery(); - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlSelectQuery; - return query.getFrom() instanceof SQLJoinTableSource - && ((SQLJoinTableSource) query.getFrom()).getJoinType() != SQLJoinTableSource.JoinType.COMMA; + if (!(sqlSelectQuery instanceof MySqlSelectQueryBlock)) { + return false; } - @Override - public void rewrite(SQLQueryExpr root) { + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlSelectQuery; + return query.getFrom() instanceof SQLJoinTableSource + && ((SQLJoinTableSource) query.getFrom()).getJoinType() + != SQLJoinTableSource.JoinType.COMMA; + } - final Multimap tableByFieldName = ArrayListMultimap.create(); - final Map tableNameToAlias = new HashMap<>(); + @Override + public void rewrite(SQLQueryExpr root) { - // Used to handle case of same tableNames in JOIN - final Set explicitAliases = new HashSet<>(); + final Multimap tableByFieldName = ArrayListMultimap.create(); + final Map tableNameToAlias = new HashMap<>(); - visitTable(root, tableExpr -> { - // Copied from SubqueryAliasRewriter ; Removes index type name if any - String tableName = tableExpr.getExpr().toString().replaceAll(" ", "").split("/")[0]; + // Used to handle case of same tableNames in JOIN + final Set explicitAliases = new HashSet<>(); - if (tableExpr.getAlias() == null) { - String alias = createAlias(tableName); - tableExpr.setAlias(alias); - explicitAliases.add(alias); - } + visitTable( + root, + tableExpr -> { + // Copied from SubqueryAliasRewriter ; Removes index type name if any + String tableName = tableExpr.getExpr().toString().replaceAll(" ", "").split("/")[0]; - Table table = new Table(tableName, tableExpr.getAlias()); + if (tableExpr.getAlias() == null) { + String alias = createAlias(tableName); + tableExpr.setAlias(alias); + explicitAliases.add(alias); + } - tableNameToAlias.put(table.getName(), table.getAlias()); + Table table = new Table(tableName, tableExpr.getAlias()); - FieldMappings fieldMappings = clusterState. 
getFieldMappings( - new String[]{tableName}).firstMapping(); - fieldMappings.flat((fieldName, type) -> tableByFieldName.put(fieldName, table)); - }); + tableNameToAlias.put(table.getName(), table.getAlias()); - //Handling cases for same tableName on either side of JOIN - if (tableNameToAlias.size() == 1) { - String tableName = tableNameToAlias.keySet().iterator().next(); - if (explicitAliases.size() == 2) { - // Neither table has explicit alias - throw new VerificationException(StringUtils.format("Not unique table/alias: [%s]", tableName)); - } else if (explicitAliases.size() == 1) { - // One table has explicit alias; use created alias for other table as alias to override fields - // starting with actual tableName as alias to explicit alias - tableNameToAlias.put(tableName, explicitAliases.iterator().next()); - } - } - - visitColumnName(root, idExpr -> { - String columnName = idExpr.getName(); - Collection
tables = tableByFieldName.get(columnName); - if (tables.size() > 1) { - // columnName without alias present in both tables - throw new VerificationException(StringUtils.format("Field name [%s] is ambiguous", columnName)); - } else if (tables.isEmpty()) { - // size() == 0? - // 1. Either the columnName does not exist (handled by SemanticAnalyzer [SemanticAnalysisException]) - // 2. Or column starts with tableName as alias or explicit alias - // If starts with tableName as alias change to explicit alias - tableNameToAlias.keySet().stream().forEach(tableName -> { - if (columnName.startsWith(tableName + DOT)) { - idExpr.setName(columnName.replace(tableName + DOT, tableNameToAlias.get(tableName) + DOT)); - } - }); - } else { - // columnName with any alias and unique to one table - Table table = tables.iterator().next(); - idExpr.setName(String.join(DOT, table.getAlias(), columnName)); - } + FieldMappings fieldMappings = + clusterState.getFieldMappings(new String[] {tableName}).firstMapping(); + fieldMappings.flat((fieldName, type) -> tableByFieldName.put(fieldName, table)); }); + + // Handling cases for same tableName on either side of JOIN + if (tableNameToAlias.size() == 1) { + String tableName = tableNameToAlias.keySet().iterator().next(); + if (explicitAliases.size() == 2) { + // Neither table has explicit alias + throw new VerificationException( + StringUtils.format("Not unique table/alias: [%s]", tableName)); + } else if (explicitAliases.size() == 1) { + // One table has explicit alias; use created alias for other table as alias to override + // fields + // starting with actual tableName as alias to explicit alias + tableNameToAlias.put(tableName, explicitAliases.iterator().next()); + } } - private void visitTable(SQLQueryExpr root, - Consumer visit) { - root.accept(new MySqlASTVisitorAdapter() { - @Override - public void endVisit(SQLExprTableSource tableExpr) { - visit.accept(tableExpr); - } + visitColumnName( + root, + idExpr -> { + String columnName = idExpr.getName(); + Collection
tables = tableByFieldName.get(columnName); + if (tables.size() > 1) { + // columnName without alias present in both tables + throw new VerificationException( + StringUtils.format("Field name [%s] is ambiguous", columnName)); + } else if (tables.isEmpty()) { + // size() == 0? + // 1. Either the columnName does not exist (handled by SemanticAnalyzer + // [SemanticAnalysisException]) + // 2. Or column starts with tableName as alias or explicit alias + // If starts with tableName as alias change to explicit alias + tableNameToAlias.keySet().stream() + .forEach( + tableName -> { + if (columnName.startsWith(tableName + DOT)) { + idExpr.setName( + columnName.replace( + tableName + DOT, tableNameToAlias.get(tableName) + DOT)); + } + }); + } else { + // columnName with any alias and unique to one table + Table table = tables.iterator().next(); + idExpr.setName(String.join(DOT, table.getAlias(), columnName)); + } }); - } + } + + private void visitTable(SQLQueryExpr root, Consumer visit) { + root.accept( + new MySqlASTVisitorAdapter() { + @Override + public void endVisit(SQLExprTableSource tableExpr) { + visit.accept(tableExpr); + } + }); + } + + private void visitColumnName(SQLQueryExpr expr, Consumer visit) { + expr.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLExprTableSource x) { + // Avoid rewriting identifier in table name + return false; + } - private void visitColumnName(SQLQueryExpr expr, - Consumer visit) { - expr.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLExprTableSource x) { - // Avoid rewriting identifier in table name - return false; - } - - @Override - public void endVisit(SQLIdentifierExpr idExpr) { - visit.accept(idExpr); - } + @Override + public void endVisit(SQLIdentifierExpr idExpr) { + visit.accept(idExpr); + } }); - } + } - private String createAlias(String alias) { - return String.format("%s_%d", alias, next()); - } + private String createAlias(String alias) { + return String.format("%s_%d", alias, next()); + } - private Integer next() { - return aliasSuffix++; - } + private Integer next() { + return aliasSuffix++; + } - private static class Table { + private static class Table { - public String getName() { - return name; - } + public String getName() { + return name; + } - public String getAlias() { - return alias; - } + public String getAlias() { + return alias; + } - /** - * Table Name. - */ - private String name; + /** Table Name. */ + private String name; - /** - * Table Alias. - */ - private String alias; + /** Table Alias. 
*/ + private String alias; - Table(String name, String alias) { - this.name = name; - this.alias = alias; - } + Table(String name, String alias) { + this.name = name; + this.alias = alias; + } - // Added for debugging - @Override - public String toString() { - return this.name + "-->" + this.alias; - } + // Added for debugging + @Override + public String toString() { + return this.name + "-->" + this.alias; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java index 609d26f4a1..b39907366e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType.COMMA; @@ -14,68 +13,65 @@ import com.alibaba.druid.sql.ast.statement.SQLTableSource; import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock; -/** - * Table (OpenSearch Index) expression in FROM statement. - */ +/** Table (OpenSearch Index) expression in FROM statement. */ class From extends SQLClause { - From(SQLTableSource expr) { - super(expr); + From(SQLTableSource expr) { + super(expr); + } + + /** + * Collect nested field(s) information and then remove them from FROM statement. Assumption: only + * 1 regular table in FROM (which is the first one) and nested field(s) has alias. + */ + @Override + void rewrite(Scope scope) { + if (!isJoin()) { + return; } - /** - * Collect nested field(s) information and then remove them from FROM statement. - * Assumption: only 1 regular table in FROM (which is the first one) and nested field(s) has alias. - */ - @Override - void rewrite(Scope scope) { - if (!isJoin()) { - return; - } - - // At this point, FROM expr is SQLJoinTableSource. - if (!isCommaJoin()) { - scope.setActualJoinType(((SQLJoinTableSource) expr).getJoinType()); - ((SQLJoinTableSource) expr).setJoinType(COMMA); - } - - if (parentAlias(scope).isEmpty()) { - // Could also be empty now since normal JOIN tables may not have alias - if (scope.getActualJoinType() != null) { - ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); - } - return; - } - - collectNestedFields(scope); - if (scope.isAnyNestedField()) { - eraseParentAlias(); - keepParentTableOnly(); - } else if (scope.getActualJoinType() != null){ - // set back the JoinType to original value if non COMMA JOIN on regular tables - ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); - } + // At this point, FROM expr is SQLJoinTableSource. 
+ if (!isCommaJoin()) { + scope.setActualJoinType(((SQLJoinTableSource) expr).getJoinType()); + ((SQLJoinTableSource) expr).setJoinType(COMMA); } - private String parentAlias(Scope scope) { - scope.setParentAlias(((SQLJoinTableSource) expr).getLeft().getAlias()); - return emptyIfNull(scope.getParentAlias()); + if (parentAlias(scope).isEmpty()) { + // Could also be empty now since normal JOIN tables may not have alias + if (scope.getActualJoinType() != null) { + ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); + } + return; } - /** - * Erase alias otherwise NLPchina has problem parsing nested field like 't.employees.name' - */ - private void eraseParentAlias() { - left().expr.setAlias(null); + collectNestedFields(scope); + if (scope.isAnyNestedField()) { + eraseParentAlias(); + keepParentTableOnly(); + } else if (scope.getActualJoinType() != null) { + // set back the JoinType to original value if non COMMA JOIN on regular tables + ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); } + } - private void keepParentTableOnly() { - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) expr.getParent(); - query.setFrom(left().expr); - left().expr.setParent(query); - } + private String parentAlias(Scope scope) { + scope.setParentAlias(((SQLJoinTableSource) expr).getLeft().getAlias()); + return emptyIfNull(scope.getParentAlias()); + } + + /** Erase alias otherwise NLPchina has problem parsing nested field like 't.employees.name' */ + private void eraseParentAlias() { + left().expr.setAlias(null); + } + + private void keepParentTableOnly() { + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) expr.getParent(); + query.setFrom(left().expr); + left().expr.setParent(query); + } /** + *
      * Collect path alias and full path mapping of nested field in FROM clause.
      * Sample:
      * FROM team t, t.employees e ...
@@ -88,6 +84,7 @@ private void keepParentTableOnly() {
      *
      * t.employees is nested because path "t" == parentAlias "t"
      * Save path alias to full path name mapping {"e": "employees"} to Scope
+ *
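 * e.g. once {"e": "employees"} is in Scope, a later identifier such as "e.firstname" can be
 * resolved to its full path "employees.firstname" by the Identifier rewrite.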

*/ private void collectNestedFields(Scope scope) { From clause = this; @@ -97,36 +94,35 @@ private void collectNestedFields(Scope scope) { clause.addIfNestedField(scope); } - private boolean isCommaJoin() { - return expr instanceof SQLJoinTableSource && ((SQLJoinTableSource) expr).getJoinType() == COMMA; - } + private boolean isCommaJoin() { + return expr instanceof SQLJoinTableSource && ((SQLJoinTableSource) expr).getJoinType() == COMMA; + } - private boolean isJoin() { - return expr instanceof SQLJoinTableSource; - } + private boolean isJoin() { + return expr instanceof SQLJoinTableSource; + } - private From left() { - return new From(((SQLJoinTableSource) expr).getLeft()); - } + private From left() { + return new From(((SQLJoinTableSource) expr).getLeft()); + } - private From right() { - return new From(((SQLJoinTableSource) expr).getRight()); - } + private From right() { + return new From(((SQLJoinTableSource) expr).getRight()); + } - private void addIfNestedField(Scope scope) { - if (!(expr instanceof SQLExprTableSource - && ((SQLExprTableSource) expr).getExpr() instanceof SQLIdentifierExpr)) { - return; - } - - Identifier table = new Identifier((SQLIdentifierExpr) ((SQLExprTableSource) expr).getExpr()); - if (table.path().equals(scope.getParentAlias())) { - scope.addAliasFullPath(emptyIfNull(expr.getAlias()), table.name()); - } + private void addIfNestedField(Scope scope) { + if (!(expr instanceof SQLExprTableSource + && ((SQLExprTableSource) expr).getExpr() instanceof SQLIdentifierExpr)) { + return; } - private String emptyIfNull(String str) { - return str == null ? "" : str; + Identifier table = new Identifier((SQLIdentifierExpr) ((SQLExprTableSource) expr).getExpr()); + if (table.path().equals(scope.getParentAlias())) { + scope.addAliasFullPath(emptyIfNull(expr.getAlias()), table.name()); } + } + private String emptyIfNull(String str) { + return str == null ? "" : str; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java index 635cc63671..e3e1cfb7ce 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java @@ -3,95 +3,95 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; /** + *
  * Identifier expression in SELECT, FROM, WHERE, GROUP BY, ORDER BY etc.
- * 
+ *
 * Ex. To make concepts clear, for "e.firstname AND t.region" in "FROM team t, t.employees e":
 * parent alias (to erase): 't'
 * path: 'e' (full path saved in Scope is 'employees')
 * name: 'firstname'
+ *
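 * Roughly: outside WHERE the identifier is renamed to its full path and wrapped in a
 * nested(...) call (something like nested(employees.firstname)); inside a WHERE condition it
 * is only renamed and tagged, and the actual rewrite is finished later in Where.rewrite().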

*/ class Identifier extends SQLClause { - private static final String SEPARATOR = "."; - - Identifier(SQLIdentifierExpr expr) { - super(expr); - } - - /** - * Erase parent alias for all identifiers but only rewrite those (nested field identifier) NOT in WHERE. - * For identifier in conditions in WHERE, use full path as tag and delay the rewrite in Where.rewrite(). - */ - @Override - void rewrite(Scope scope) { - eraseParentAlias(scope); - if (isNestedField(scope)) { - renameByFullPath(scope); - if (isInCondition()) { - useFullPathAsTag(scope); - } else { - replaceByNestedFunction(expr, pathFromIdentifier(expr)); - } - } - } - - /** - * return the path of the expr name. e.g. - * expecting p returned as path in both WHERE p.name = 'A' and WHERE p IS NULL cases, - * in which expr.name = p.name and p separately - */ - String path() { - return separatorIndex() == -1 ? expr.getName() : expr.getName().substring(0, separatorIndex()); - } - - String name() { - return expr.getName().substring(separatorIndex() + 1); + private static final String SEPARATOR = "."; + + Identifier(SQLIdentifierExpr expr) { + super(expr); + } + + /** + * Erase parent alias for all identifiers but only rewrite those (nested field identifier) NOT in + * WHERE. For identifier in conditions in WHERE, use full path as tag and delay the rewrite in + * Where.rewrite(). + */ + @Override + void rewrite(Scope scope) { + eraseParentAlias(scope); + if (isNestedField(scope)) { + renameByFullPath(scope); + if (isInCondition()) { + useFullPathAsTag(scope); + } else { + replaceByNestedFunction(expr, pathFromIdentifier(expr)); + } } - - private int separatorIndex() { - return expr.getName().indexOf(SEPARATOR); - } - - /** - * Erase parent alias otherwise it's required to specify it everywhere even on nested - * field (which NLPchina has problem with). - * Sample: "FROM team t, t.employees e WHERE t.region = 'US'" => "WHERE region = 'US'" - */ - private void eraseParentAlias(Scope scope) { - if (isStartWithParentAlias(scope)) { - expr.setName(name()); - } - } - - private boolean isStartWithParentAlias(Scope scope) { - return path().equals(scope.getParentAlias()); + } + + /** + * return the path of the expr name. e.g. expecting p returned as path in both WHERE p.name = 'A' + * and WHERE p IS NULL cases, in which expr.name = p.name and p separately + */ + String path() { + return separatorIndex() == -1 ? expr.getName() : expr.getName().substring(0, separatorIndex()); + } + + String name() { + return expr.getName().substring(separatorIndex() + 1); + } + + private int separatorIndex() { + return expr.getName().indexOf(SEPARATOR); + } + + /** + * Erase parent alias otherwise it's required to specify it everywhere even on nested field (which + * NLPchina has problem with). 
Sample: "FROM team t, t.employees e WHERE t.region = 'US'" => + * "WHERE region = 'US'" + */ + private void eraseParentAlias(Scope scope) { + if (isStartWithParentAlias(scope)) { + expr.setName(name()); } + } - private boolean isNestedField(Scope scope) { - return !scope.getFullPath(path()).isEmpty(); - } + private boolean isStartWithParentAlias(Scope scope) { + return path().equals(scope.getParentAlias()); + } - private void renameByFullPath(Scope scope) { - String fullPath = scope.getFullPath(path()); - if (fullPath.isEmpty()) { - throw new IllegalStateException("Full path not found for identifier:" + expr.getName()); - } - expr.setName(expr.getName().replaceFirst(path(), fullPath)); - } + private boolean isNestedField(Scope scope) { + return !scope.getFullPath(path()).isEmpty(); + } - private void useFullPathAsTag(Scope scope) { - scope.addConditionTag((SQLBinaryOpExpr) expr.getParent(), path()); + private void renameByFullPath(Scope scope) { + String fullPath = scope.getFullPath(path()); + if (fullPath.isEmpty()) { + throw new IllegalStateException("Full path not found for identifier:" + expr.getName()); } + expr.setName(expr.getName().replaceFirst(path(), fullPath)); + } - private boolean isInCondition() { - return expr.getParent() instanceof SQLBinaryOpExpr; - } + private void useFullPathAsTag(Scope scope) { + scope.addConditionTag((SQLBinaryOpExpr) expr.getParent(), path()); + } + private boolean isInCondition() { + return expr.getParent() instanceof SQLBinaryOpExpr; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java index 99505e5e49..281918d52c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.SQLExpr; @@ -17,28 +16,31 @@ import org.opensearch.sql.legacy.rewriter.subquery.RewriterContext; /** + *
  * IN Subquery Rewriter.
  * For example,
  * SELECT * FROM A WHERE a IN (SELECT b FROM B) and c > 10 should be rewritten to
  * SELECT A.* FROM A JOIN B ON A.a = B.b WHERE c > 10 and B.b IS NOT NULL.
+ * 
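 * The added "B.b IS NOT NULL" guard (built by generateNullOp() below) keeps only joined rows
 * that actually matched a value from the subquery, so the JOIN still behaves like the IN predicate.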
*/ public class InRewriter implements Rewriter { - private final SQLInSubQueryExpr inExpr; - private final RewriterContext ctx; - private final MySqlSelectQueryBlock queryBlock; + private final SQLInSubQueryExpr inExpr; + private final RewriterContext ctx; + private final MySqlSelectQueryBlock queryBlock; - public InRewriter(SQLInSubQueryExpr inExpr, RewriterContext ctx) { - this.inExpr = inExpr; - this.ctx = ctx; - this.queryBlock = (MySqlSelectQueryBlock) inExpr.getSubQuery().getQuery(); - } + public InRewriter(SQLInSubQueryExpr inExpr, RewriterContext ctx) { + this.inExpr = inExpr; + this.ctx = ctx; + this.queryBlock = (MySqlSelectQueryBlock) inExpr.getSubQuery().getQuery(); + } - @Override - public boolean canRewrite() { - return !inExpr.isNot(); - } + @Override + public boolean canRewrite() { + return !inExpr.isNot(); + } /** + *
      * Build Where clause from input query.
      * 

 * With the input query.
@@ -54,26 +56,27 @@ public boolean canRewrite() {
 *        |       |
 *        b       B
 *

- * + *
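 * Roughly: the subquery's FROM (B) is added as a JOIN through addJoinTable(), and its WHERE,
 * if present, is ANDed with the IS NOT NULL guard and handed to the outer query via ctx.addWhere().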

*/ @Override public void rewrite() { SQLTableSource from = queryBlock.getFrom(); addJoinTable(from); - SQLExpr where = queryBlock.getWhere(); - if (null == where) { - ctx.addWhere(generateNullOp()); - } else if (where instanceof SQLBinaryOpExpr) { - ctx.addWhere(and(generateNullOp(), (SQLBinaryOpExpr) where)); - } else { - throw new IllegalStateException("unsupported where class type " + where.getClass()); - } + SQLExpr where = queryBlock.getWhere(); + if (null == where) { + ctx.addWhere(generateNullOp()); + } else if (where instanceof SQLBinaryOpExpr) { + ctx.addWhere(and(generateNullOp(), (SQLBinaryOpExpr) where)); + } else { + throw new IllegalStateException("unsupported where class type " + where.getClass()); } + } /** - * Build the Null check expression. For example, - * SELECT * FROM A WHERE a IN (SELECT b FROM B), should return B.b IS NOT NULL + * Build the Null check expression. For example,
+ * SELECT * FROM A WHERE a IN (SELECT b FROM B)
+ * should return B.b IS NOT NULL */ private SQLBinaryOpExpr generateNullOp() { SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); @@ -81,23 +84,24 @@ private SQLBinaryOpExpr generateNullOp() { binaryOpExpr.setRight(new SQLNullExpr()); binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); - return binaryOpExpr; - } + return binaryOpExpr; + } - /** - * Add the {@link SQLTableSource} with {@link JoinType} and {@link SQLBinaryOpExpr} to the {@link RewriterContext}. - */ - private void addJoinTable(SQLTableSource right) { - SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(inExpr.getExpr(), - SQLBinaryOperator.Equality, - fetchJoinExpr()); - ctx.addJoin(right, JoinType.JOIN, binaryOpExpr); - } + /** + * Add the {@link SQLTableSource} with {@link JoinType} and {@link SQLBinaryOpExpr} to the {@link + * RewriterContext}. + */ + private void addJoinTable(SQLTableSource right) { + SQLBinaryOpExpr binaryOpExpr = + new SQLBinaryOpExpr(inExpr.getExpr(), SQLBinaryOperator.Equality, fetchJoinExpr()); + ctx.addJoin(right, JoinType.JOIN, binaryOpExpr); + } - private SQLExpr fetchJoinExpr() { - if (queryBlock.getSelectList().size() > 1) { - throw new IllegalStateException("Unsupported subquery with multiple select " + queryBlock.getSelectList()); - } - return queryBlock.getSelectList().get(0).getExpr(); + private SQLExpr fetchJoinExpr() { + if (queryBlock.getSelectList().size() > 1) { + throw new IllegalStateException( + "Unsupported subquery with multiple select " + queryBlock.getSelectList()); } + return queryBlock.getSelectList().get(0).getExpr(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java index c7656e420f..26684f4f61 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.SQLExpr; @@ -18,9 +17,9 @@ import org.opensearch.sql.legacy.rewriter.subquery.RewriterContext; /** - * Nested EXISTS SQL Rewriter. - * The EXISTS clause will be remove from the SQL. The translated SQL will use ElasticSearch's nested query logic. - * + * Nested EXISTS SQL Rewriter. The EXISTS clause will be remove from the SQL. The translated SQL + * will use ElasticSearch's nested query logic. + *
  * For example,
  * 

  * SELECT e.name
@@ -31,65 +30,65 @@
 * FROM employee as e, e.projects as p
 * WHERE p is not null
 *

+ *
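 * Roughly: the nested table from the EXISTS subquery is re-added as a COMMA join, the
 * subquery's WHERE is ANDed with an "alias IS NOT MISSING" check, and for NOT EXISTS the whole
 * translated condition is wrapped in a NOT.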
*/ public class NestedExistsRewriter implements Rewriter { - private final SQLExistsExpr existsExpr; - private final RewriterContext ctx; - private final SQLExprTableSource from; - private final SQLExpr where; + private final SQLExistsExpr existsExpr; + private final RewriterContext ctx; + private final SQLExprTableSource from; + private final SQLExpr where; - public NestedExistsRewriter(SQLExistsExpr existsExpr, RewriterContext board) { - this.existsExpr = existsExpr; - this.ctx = board; - MySqlSelectQueryBlock queryBlock = (MySqlSelectQueryBlock) existsExpr.getSubQuery().getQuery(); - if (queryBlock.getFrom() instanceof SQLExprTableSource) { - this.from = (SQLExprTableSource) queryBlock.getFrom(); - } else { - throw new IllegalStateException("unsupported expression in from " + queryBlock.getFrom().getClass()); - } - this.where = queryBlock.getWhere(); + public NestedExistsRewriter(SQLExistsExpr existsExpr, RewriterContext board) { + this.existsExpr = existsExpr; + this.ctx = board; + MySqlSelectQueryBlock queryBlock = (MySqlSelectQueryBlock) existsExpr.getSubQuery().getQuery(); + if (queryBlock.getFrom() instanceof SQLExprTableSource) { + this.from = (SQLExprTableSource) queryBlock.getFrom(); + } else { + throw new IllegalStateException( + "unsupported expression in from " + queryBlock.getFrom().getClass()); } + this.where = queryBlock.getWhere(); + } - /** - * The from table must be nested field. - */ - @Override - public boolean canRewrite() { - return ctx.isNestedQuery(from); - } + /** The from table must be nested field. */ + @Override + public boolean canRewrite() { + return ctx.isNestedQuery(from); + } - @Override - public void rewrite() { - ctx.addJoin(from, JoinType.COMMA); - ctx.addWhere(rewriteExistsWhere()); - } + @Override + public void rewrite() { + ctx.addJoin(from, JoinType.COMMA); + ctx.addWhere(rewriteExistsWhere()); + } - private SQLExpr rewriteExistsWhere() { - SQLBinaryOpExpr translatedWhere; - SQLBinaryOpExpr notMissingOp = buildNotMissingOp(); - if (null == where) { - translatedWhere = notMissingOp; - } else if (where instanceof SQLBinaryOpExpr) { - translatedWhere = and(notMissingOp, (SQLBinaryOpExpr) where); - } else { - throw new IllegalStateException("unsupported expression in where " + where.getClass()); - } + private SQLExpr rewriteExistsWhere() { + SQLBinaryOpExpr translatedWhere; + SQLBinaryOpExpr notMissingOp = buildNotMissingOp(); + if (null == where) { + translatedWhere = notMissingOp; + } else if (where instanceof SQLBinaryOpExpr) { + translatedWhere = and(notMissingOp, (SQLBinaryOpExpr) where); + } else { + throw new IllegalStateException("unsupported expression in where " + where.getClass()); + } - if (existsExpr.isNot()) { - SQLNotExpr sqlNotExpr = new SQLNotExpr(translatedWhere); - translatedWhere.setParent(sqlNotExpr); - return sqlNotExpr; - } else { - return translatedWhere; - } + if (existsExpr.isNot()) { + SQLNotExpr sqlNotExpr = new SQLNotExpr(translatedWhere); + translatedWhere.setParent(sqlNotExpr); + return sqlNotExpr; + } else { + return translatedWhere; } + } - private SQLBinaryOpExpr buildNotMissingOp() { - SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); - binaryOpExpr.setLeft(new SQLIdentifierExpr(from.getAlias())); - binaryOpExpr.setRight(new SQLIdentifierExpr("MISSING")); - binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); + private SQLBinaryOpExpr buildNotMissingOp() { + SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); + binaryOpExpr.setLeft(new SQLIdentifierExpr(from.getAlias())); + binaryOpExpr.setRight(new 
SQLIdentifierExpr("MISSING")); + binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); - return binaryOpExpr; - } + return binaryOpExpr; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java index ec35151e4d..de6694d90d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.utils; import com.alibaba.druid.sql.ast.expr.SQLExistsExpr; @@ -12,43 +11,39 @@ import java.util.ArrayList; import java.util.List; -/** - * Visitor which try to find the SubQuery. - */ +/** Visitor which try to find the SubQuery. */ public class FindSubQuery extends MySqlASTVisitorAdapter { - private final List sqlInSubQueryExprs = new ArrayList<>(); - private final List sqlExistsExprs = new ArrayList<>(); - private boolean continueVisit = true; - - public FindSubQuery continueVisitWhenFound(boolean continueVisit) { - this.continueVisit = continueVisit; - return this; - } - - /** - * Return true if has SubQuery. - */ - public boolean hasSubQuery() { - return !sqlInSubQueryExprs.isEmpty() || !sqlExistsExprs.isEmpty(); - } - - @Override - public boolean visit(SQLInSubQueryExpr query) { - sqlInSubQueryExprs.add(query); - return continueVisit; - } - - @Override - public boolean visit(SQLExistsExpr query) { - sqlExistsExprs.add(query); - return continueVisit; - } - - public List getSqlInSubQueryExprs() { - return sqlInSubQueryExprs; - } - - public List getSqlExistsExprs() { - return sqlExistsExprs; - } + private final List sqlInSubQueryExprs = new ArrayList<>(); + private final List sqlExistsExprs = new ArrayList<>(); + private boolean continueVisit = true; + + public FindSubQuery continueVisitWhenFound(boolean continueVisit) { + this.continueVisit = continueVisit; + return this; + } + + /** Return true if has SubQuery. */ + public boolean hasSubQuery() { + return !sqlInSubQueryExprs.isEmpty() || !sqlExistsExprs.isEmpty(); + } + + @Override + public boolean visit(SQLInSubQueryExpr query) { + sqlInSubQueryExprs.add(query); + return continueVisit; + } + + @Override + public boolean visit(SQLExistsExpr query) { + sqlExistsExprs.add(query); + return continueVisit; + } + + public List getSqlInSubQueryExprs() { + return sqlInSubQueryExprs; + } + + public List getSqlExistsExprs() { + return sqlExistsExprs; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java index ecc86877ee..26f17feeb6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import com.fasterxml.jackson.core.JsonFactory; @@ -15,31 +14,29 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; -/** - * Utility Class for formatting Json string pretty. - */ +/** Utility Class for formatting Json string pretty. 
*/ public class JsonPrettyFormatter { - /** - * @param jsonString Json string without/with pretty format - * @return A standard and pretty formatted json string - * @throws IOException - */ - public static String format(String jsonString) throws IOException { - //turn _explain response into pretty formatted Json - XContentBuilder contentBuilder = XContentFactory.jsonBuilder().prettyPrint(); - try ( - XContentParser contentParser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonString)) - ){ - contentBuilder.copyCurrentStructure(contentParser); - } - return contentBuilder.toString(); + /** + * @param jsonString Json string without/with pretty format + * @return A standard and pretty formatted json string + * @throws IOException + */ + public static String format(String jsonString) throws IOException { + // turn _explain response into pretty formatted Json + XContentBuilder contentBuilder = XContentFactory.jsonBuilder().prettyPrint(); + try (XContentParser contentParser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonString))) { + contentBuilder.copyCurrentStructure(contentParser); } + return contentBuilder.toString(); + } - private JsonPrettyFormatter() { - throw new AssertionError(getClass().getCanonicalName() + " is a utility class and must not be initialized"); - } + private JsonPrettyFormatter() { + throw new AssertionError( + getClass().getCanonicalName() + " is a utility class and must not be initialized"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java index d9e9271728..4a8c609f47 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import static org.hamcrest.Matchers.aMapWithSize; @@ -23,141 +22,136 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Test cases for environment - */ +/** Test cases for environment */ public class EnvironmentTest { - /** Use context class for push/pop */ - private final SemanticContext context = new SemanticContext(); - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolve() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - - // New environment 1 - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - environment().define(city, KEYWORD); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - Assert.assertTrue(environment().resolve(city).isPresent()); - - // New environment 2 - context.push(); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(manager, OBJECT); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - Assert.assertTrue(environment().resolve(city).isPresent()); - Assert.assertTrue(environment().resolve(manager).isPresent()); - } - - @Test - public void 
defineFieldSymbolInDifferentEnvironmentsShouldNotAbleToResolveOncePopped() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - - // New environment - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(city, OBJECT); - environment().define(manager, OBJECT); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - Assert.assertTrue(environment().resolve(city).isPresent()); - Assert.assertTrue(environment().resolve(manager).isPresent()); - - context.pop(); - Assert.assertFalse(environment().resolve(city).isPresent()); - Assert.assertFalse(environment().resolve(manager).isPresent()); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - } - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveByPrefix() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - - // New environment 1 - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - environment().define(city, KEYWORD); - - // New environment 2 - context.push(); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(manager, OBJECT); - - Map typeByName = environment().resolveByPrefix(new Symbol(Namespace.FIELD_NAME, "s")); - assertThat( - typeByName, - allOf( - aMapWithSize(3), - hasEntry("s.birthday", DATE), - hasEntry("s.city", KEYWORD), - hasEntry("s.manager", OBJECT) - ) - ); - } - - @Test - public void defineFieldSymbolShouldBeAbleToResolveAll() { - environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects"), new OpenSearchIndex("s.projects", NESTED_FIELD)); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.release"), DATE); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.active"), BOOLEAN); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.address"), TEXT); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.city"), KEYWORD); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), TEXT); - - Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(6), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.address", TEXT), - hasEntry("s.city", KEYWORD), - hasEntry("s.manager.name", TEXT) - ) - ); - } - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveAll() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - - // New environment 1 - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - environment().define(city, KEYWORD); - - // New environment 2 - context.push(); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(manager, OBJECT); - - Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(3), - hasEntry("s.birthday", DATE), - hasEntry("s.city", KEYWORD), - hasEntry("s.manager", OBJECT) - ) - ); - } - - private Environment environment() { - return context.peek(); - } - + /** Use context class for push/pop */ + private final SemanticContext context = new 
SemanticContext(); + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolve() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + + // New environment 1 + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + environment().define(city, KEYWORD); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + Assert.assertTrue(environment().resolve(city).isPresent()); + + // New environment 2 + context.push(); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(manager, OBJECT); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + Assert.assertTrue(environment().resolve(city).isPresent()); + Assert.assertTrue(environment().resolve(manager).isPresent()); + } + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldNotAbleToResolveOncePopped() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + + // New environment + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(city, OBJECT); + environment().define(manager, OBJECT); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + Assert.assertTrue(environment().resolve(city).isPresent()); + Assert.assertTrue(environment().resolve(manager).isPresent()); + + context.pop(); + Assert.assertFalse(environment().resolve(city).isPresent()); + Assert.assertFalse(environment().resolve(manager).isPresent()); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + } + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveByPrefix() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + + // New environment 1 + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + environment().define(city, KEYWORD); + + // New environment 2 + context.push(); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(manager, OBJECT); + + Map typeByName = + environment().resolveByPrefix(new Symbol(Namespace.FIELD_NAME, "s")); + assertThat( + typeByName, + allOf( + aMapWithSize(3), + hasEntry("s.birthday", DATE), + hasEntry("s.city", KEYWORD), + hasEntry("s.manager", OBJECT))); + } + + @Test + public void defineFieldSymbolShouldBeAbleToResolveAll() { + environment() + .define( + new Symbol(Namespace.FIELD_NAME, "s.projects"), + new OpenSearchIndex("s.projects", NESTED_FIELD)); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.release"), DATE); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.active"), BOOLEAN); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.address"), TEXT); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.city"), KEYWORD); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), TEXT); + + Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(6), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.release", DATE), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.address", TEXT), + hasEntry("s.city", KEYWORD), + 
hasEntry("s.manager.name", TEXT))); + } + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveAll() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + + // New environment 1 + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + environment().define(city, KEYWORD); + + // New environment 2 + context.push(); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(manager, OBJECT); + + Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(3), + hasEntry("s.birthday", DATE), + hasEntry("s.city", KEYWORD), + hasEntry("s.manager", OBJECT))); + } + + private Environment environment() { + return context.peek(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java index db76c01947..0bd8b526bb 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static java.util.Collections.singletonList; @@ -19,34 +18,31 @@ import org.junit.Test; -/** - * Generic type test - */ +/** Generic type test */ public class GenericTypeTest { - @Test - public void passNumberArgToLogShouldReturnNumber() { - assertEquals(DOUBLE, LOG.construct(singletonList(NUMBER))); - } - - @Test - public void passIntegerArgToLogShouldReturnDouble() { - assertEquals(DOUBLE, LOG.construct(singletonList(INTEGER))); - } - - @Test - public void passLongArgToLogShouldReturnDouble() { - assertEquals(DOUBLE, LOG.construct(singletonList(LONG))); - } - - @Test - public void passTextArgToLogShouldReturnTypeError() { - assertEquals(TYPE_ERROR, LOG.construct(singletonList(TEXT))); - } - - @Test - public void passKeywordArgToLogShouldReturnTypeError() { - assertEquals(TYPE_ERROR, LOG.construct(singletonList(KEYWORD))); - } - + @Test + public void passNumberArgToLogShouldReturnNumber() { + assertEquals(DOUBLE, LOG.construct(singletonList(NUMBER))); + } + + @Test + public void passIntegerArgToLogShouldReturnDouble() { + assertEquals(DOUBLE, LOG.construct(singletonList(INTEGER))); + } + + @Test + public void passLongArgToLogShouldReturnDouble() { + assertEquals(DOUBLE, LOG.construct(singletonList(LONG))); + } + + @Test + public void passTextArgToLogShouldReturnTypeError() { + assertEquals(TYPE_ERROR, LOG.construct(singletonList(TEXT))); + } + + @Test + public void passKeywordArgToLogShouldReturnTypeError() { + assertEquals(TYPE_ERROR, LOG.construct(singletonList(KEYWORD))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java index fe8b25ed1c..e71fcfe10e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static java.util.Collections.emptyMap; @@ -22,109 +21,83 @@ import org.opensearch.sql.legacy.util.MatcherUtils; /** - * Unit test 
for {@code FieldMapping} with trivial methods ignored such as isSpecified, isMetaField etc. + * Unit test for {@code FieldMapping} with trivial methods ignored such as isSpecified, isMetaField + * etc. */ public class FieldMappingTest { - @Test - public void testFieldMatchesWildcardPatternSpecifiedInQuery() { - assertThat( - new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("employee.*")), - isWildcardSpecified(true) - ); - } - - @Test - public void testFieldMismatchesWildcardPatternSpecifiedInQuery() { - assertThat( - new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("manager.*")), - isWildcardSpecified(false) - ); - } - - @Test - public void testFieldIsProperty() { - assertThat( - new FieldMapping("employee.first"), - isPropertyField(true) - ); - } - - @Test - public void testNestedMultiFieldIsProperty() { - assertThat( - new FieldMapping("employee.first.keyword"), - isPropertyField(true) - ); - } - - @Test - public void testFieldIsNotProperty() { - assertThat( - new FieldMapping("employee"), - isPropertyField(false) - ); - } - - @Test - public void testMultiFieldIsNotProperty() { - assertThat( - new FieldMapping("employee.keyword"), - isPropertyField(false) - ); - } - - @Test - public void testUnknownFieldTreatedAsObject() { - assertThat( - new FieldMapping("employee"), - hasType("object") - ); - } - - @Test - public void testDeepNestedField() { - assertThat( - new FieldMapping( + @Test + public void testFieldMatchesWildcardPatternSpecifiedInQuery() { + assertThat( + new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("employee.*")), + isWildcardSpecified(true)); + } + + @Test + public void testFieldMismatchesWildcardPatternSpecifiedInQuery() { + assertThat( + new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("manager.*")), + isWildcardSpecified(false)); + } + + @Test + public void testFieldIsProperty() { + assertThat(new FieldMapping("employee.first"), isPropertyField(true)); + } + + @Test + public void testNestedMultiFieldIsProperty() { + assertThat(new FieldMapping("employee.first.keyword"), isPropertyField(true)); + } + + @Test + public void testFieldIsNotProperty() { + assertThat(new FieldMapping("employee"), isPropertyField(false)); + } + + @Test + public void testMultiFieldIsNotProperty() { + assertThat(new FieldMapping("employee.keyword"), isPropertyField(false)); + } + + @Test + public void testUnknownFieldTreatedAsObject() { + assertThat(new FieldMapping("employee"), hasType("object")); + } + + @Test + public void testDeepNestedField() { + assertThat( + new FieldMapping( + "employee.location.city", + ImmutableMap.of( "employee.location.city", - ImmutableMap.of( + new FieldMappingMetadata( "employee.location.city", - new FieldMappingMetadata("employee.location.city", new BytesArray( - "{\n" + - " \"city\" : {\n" + - " \"type\" : \"text\"\n" + - " }\n" + - "}") - ) - ), - emptyMap() - ), - hasType("text") - ); - } - - private Matcher isWildcardSpecified(boolean isMatched) { - return MatcherUtils.featureValueOf("is field match wildcard specified in query", - is(isMatched), - FieldMapping::isWildcardSpecified); - } - - private Matcher isPropertyField(boolean isProperty) { - return MatcherUtils.featureValueOf("isPropertyField", - is(isProperty), - FieldMapping::isPropertyField); - } - - private Matcher hasType(String expected) { - return MatcherUtils.featureValueOf("type", - is(expected), - FieldMapping::type); - } - - private Map fieldsSpecifiedInQuery(String...fieldNames) { - return 
Arrays.stream(fieldNames). - collect(Collectors.toMap(name -> name, - name -> new Field(name, ""))); - } - + new BytesArray( + "{\n" + " \"city\" : {\n" + " \"type\" : \"text\"\n" + " }\n" + "}"))), + emptyMap()), + hasType("text")); + } + + private Matcher isWildcardSpecified(boolean isMatched) { + return MatcherUtils.featureValueOf( + "is field match wildcard specified in query", + is(isMatched), + FieldMapping::isWildcardSpecified); + } + + private Matcher isPropertyField(boolean isProperty) { + return MatcherUtils.featureValueOf( + "isPropertyField", is(isProperty), FieldMapping::isPropertyField); + } + + private Matcher hasType(String expected) { + return MatcherUtils.featureValueOf("type", is(expected), FieldMapping::type); + } + + private Map fieldsSpecifiedInQuery(String... fieldNames) { + return Arrays.stream(fieldNames) + .collect(Collectors.toMap(name -> name, name -> new Field(name, ""))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java index 412c351c56..f6de8a98e6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static org.hamcrest.MatcherAssert.assertThat; @@ -23,51 +22,47 @@ import org.junit.Test; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Test for FieldMappings class - */ +/** Test for FieldMappings class */ public class FieldMappingsTest { - private static final String TEST_MAPPING_FILE = "mappings/field_mappings.json"; - - @Before - public void setUp() throws IOException { - URL url = Resources.getResource(TEST_MAPPING_FILE); - String mappings = Resources.toString(url, Charsets.UTF_8); - mockLocalClusterState(mappings); - } + private static final String TEST_MAPPING_FILE = "mappings/field_mappings.json"; - @After - public void cleanUp() { - LocalClusterState.state(null); - } + @Before + public void setUp() throws IOException { + URL url = Resources.getResource(TEST_MAPPING_FILE); + String mappings = Resources.toString(url, Charsets.UTF_8); + mockLocalClusterState(mappings); + } - @Test - public void flatFieldMappingsShouldIncludeFieldsOnAllLevels() { - IndexMappings indexMappings = LocalClusterState.state().getFieldMappings(new String[]{"field_mappings"}); - FieldMappings fieldMappings = indexMappings.firstMapping(); + @After + public void cleanUp() { + LocalClusterState.state(null); + } - Map typeByFieldName = new HashMap<>(); - fieldMappings.flat(typeByFieldName::put); - assertThat( - typeByFieldName, - allOf( - aMapWithSize(13), - hasEntry("address", "text"), - hasEntry("age", "integer"), - hasEntry("employer", "text"), - hasEntry("employer.raw", "text"), - hasEntry("employer.keyword", "keyword"), - hasEntry("projects", "nested"), - hasEntry("projects.active", "boolean"), - hasEntry("projects.members", "nested"), - hasEntry("projects.members.name", "text"), - hasEntry("manager", "object"), - hasEntry("manager.name", "text"), - hasEntry("manager.name.keyword", "keyword"), - hasEntry("manager.address", "keyword") - ) - ); - } + @Test + public void flatFieldMappingsShouldIncludeFieldsOnAllLevels() { + IndexMappings indexMappings = + LocalClusterState.state().getFieldMappings(new String[] {"field_mappings"}); + FieldMappings fieldMappings = 
indexMappings.firstMapping(); + Map typeByFieldName = new HashMap<>(); + fieldMappings.flat(typeByFieldName::put); + assertThat( + typeByFieldName, + allOf( + aMapWithSize(13), + hasEntry("address", "text"), + hasEntry("age", "integer"), + hasEntry("employer", "text"), + hasEntry("employer.raw", "text"), + hasEntry("employer.keyword", "keyword"), + hasEntry("projects", "nested"), + hasEntry("projects.active", "boolean"), + hasEntry("projects.members", "nested"), + hasEntry("projects.members.name", "text"), + hasEntry("manager", "object"), + hasEntry("manager.name", "text"), + hasEntry("manager.name.keyword", "keyword"), + hasEntry("manager.address", "keyword"))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java index b9c4935f50..5a6bc4541e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java @@ -3,46 +3,43 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; import org.junit.Assert; import org.junit.Test; -/** - * Test cases for util class {@link Identifier}. - */ +/** Test cases for util class {@link Identifier}. */ public class IdentifierTest { - @Test - public void identifierWithWordBeforeFirstDotShouldBeConsideredHavePrefix() { - Assert.assertTrue(identifier("accounts.age").hasPrefix()); - } - - @Test - public void identifierWithoutDotShouldNotBeConsideredHavePrefix() { - Assert.assertFalse(identifier("age").hasPrefix()); - } - - @Test - public void identifierStartingWithDotShouldNotBeConsideredHavePrefix() { - Assert.assertFalse(identifier(".age").hasPrefix()); - } - - @Test - public void prefixOfIdentifierShouldBeWordBeforeFirstDot() { - Assert.assertEquals("accounts", identifier("accounts.age").prefix()); - } - - @Test - public void removePrefixShouldRemoveFirstWordAndDot() { - Identifier identifier = identifier("accounts.age"); - identifier.removePrefix(); - Assert.assertEquals("age", identifier.name()); - } - - private Identifier identifier(String name) { - return new Identifier(new SQLIdentifierExpr(name)); - } + @Test + public void identifierWithWordBeforeFirstDotShouldBeConsideredHavePrefix() { + Assert.assertTrue(identifier("accounts.age").hasPrefix()); + } + + @Test + public void identifierWithoutDotShouldNotBeConsideredHavePrefix() { + Assert.assertFalse(identifier("age").hasPrefix()); + } + + @Test + public void identifierStartingWithDotShouldNotBeConsideredHavePrefix() { + Assert.assertFalse(identifier(".age").hasPrefix()); + } + + @Test + public void prefixOfIdentifierShouldBeWordBeforeFirstDot() { + Assert.assertEquals("accounts", identifier("accounts.age").prefix()); + } + + @Test + public void removePrefixShouldRemoveFirstWordAndDot() { + Identifier identifier = identifier("accounts.age"); + identifier.removePrefix(); + Assert.assertEquals("age", identifier.name()); + } + + private Identifier identifier(String name) { + return new Identifier(new SQLIdentifierExpr(name)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java index c4c9504486..09cd9e9efc 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import org.junit.Assert; @@ -16,35 +15,39 @@ public class ErrorMessageFactoryTest { - private Throwable nonOpenSearchThrowable = new Throwable(); - private Throwable openSearchThrowable = new OpenSearchException(nonOpenSearchThrowable); - - @Test - public void openSearchExceptionShouldCreateEsErrorMessage() { - Exception exception = new OpenSearchException(nonOpenSearchThrowable); - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertTrue(msg instanceof OpenSearchErrorMessage); - } - - @Test - public void nonOpenSearchExceptionShouldCreateGenericErrorMessage() { - Exception exception = new Exception(nonOpenSearchThrowable); - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertFalse(msg instanceof OpenSearchErrorMessage); - } - - @Test - public void nonOpenSearchExceptionWithWrappedEsExceptionCauseShouldCreateEsErrorMessage() { - Exception exception = (Exception) openSearchThrowable; - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertTrue(msg instanceof OpenSearchErrorMessage); - } - - @Test - public void nonOpenSearchExceptionWithMultiLayerWrappedEsExceptionCauseShouldCreateEsErrorMessage() { - Exception exception = new Exception(new Throwable(new Throwable(openSearchThrowable))); - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertTrue(msg instanceof OpenSearchErrorMessage); - } - + private Throwable nonOpenSearchThrowable = new Throwable(); + private Throwable openSearchThrowable = new OpenSearchException(nonOpenSearchThrowable); + + @Test + public void openSearchExceptionShouldCreateEsErrorMessage() { + Exception exception = new OpenSearchException(nonOpenSearchThrowable); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertTrue(msg instanceof OpenSearchErrorMessage); + } + + @Test + public void nonOpenSearchExceptionShouldCreateGenericErrorMessage() { + Exception exception = new Exception(nonOpenSearchThrowable); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertFalse(msg instanceof OpenSearchErrorMessage); + } + + @Test + public void nonOpenSearchExceptionWithWrappedEsExceptionCauseShouldCreateEsErrorMessage() { + Exception exception = (Exception) openSearchThrowable; + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertTrue(msg instanceof OpenSearchErrorMessage); + } + + @Test + public void + nonOpenSearchExceptionWithMultiLayerWrappedEsExceptionCauseShouldCreateEsErrorMessage() { + Exception exception = new Exception(new Throwable(new Throwable(openSearchThrowable))); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertTrue(msg instanceof OpenSearchErrorMessage); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java index 5a13125013..deb7b5f600 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertEquals; @@ -16,15 +15,15 @@ public class FormatTest { - @Test - public void ofJdbcShouldReturnJDBCFormat() { - Optional format = Format.of(Format.JDBC.getFormatName()); - assertTrue(format.isPresent()); - assertEquals(Format.JDBC, format.get()); - } + @Test + public void ofJdbcShouldReturnJDBCFormat() { + Optional format = Format.of(Format.JDBC.getFormatName()); + assertTrue(format.isPresent()); + assertEquals(Format.JDBC, format.get()); + } - @Test - public void ofUnknownFormatShouldReturnEmpty() { - assertFalse(Format.of("xml").isPresent()); - } + @Test + public void ofUnknownFormatShouldReturnEmpty() { + assertFalse(Format.of("xml").isPresent()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java index fee440c3e9..8863af0463 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static java.util.stream.Collectors.toMap; @@ -35,360 +34,277 @@ import org.opensearch.sql.legacy.query.maker.AggMaker; import org.opensearch.sql.legacy.util.SqlParserUtils; - public class HavingTest { - private static final String SELECT_CNT = "SELECT COUNT(*) as c "; - private static final String SELECT_CNT_AVG = "SELECT COUNT(*) as c, AVG(age) as a "; - private static final String SELECT_CNT_AVG_SUM = "SELECT COUNT(*) as c, AVG(age) as a, SUM(income) as i "; - private static final String FROM_BANK = "FROM bank "; - private static final String GROUP_BY_AGE = "GROUP BY age "; - private static final String SELECT_CNT_FROM_BANK_GROUP_BY_AGE = SELECT_CNT + FROM_BANK + GROUP_BY_AGE; - private static final String SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE = SELECT_CNT_AVG + FROM_BANK + GROUP_BY_AGE; - private static final String SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE = SELECT_CNT_AVG_SUM + FROM_BANK + GROUP_BY_AGE; - private static final String NESTED_SELECT_COUNT = "SELECT COUNT(nested(income, 'income')) as c "; - private static final String NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE = NESTED_SELECT_COUNT + FROM_BANK + GROUP_BY_AGE; - - @Test - public void singleCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a"), - hasScript("params.a > 30") - ) - )); - } - - @Ignore - @Test - public void singleConditionWithTwoAggExpr() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > c"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a"), - hasScript("params.a > params.c") - ) - )); - } - - @Test - public void singleConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "avg_0: avg_0"), - hasScript("params.avg_0 > 30") - ) - )); - } - - @Ignore - @Test - public void singleConditionWithHavingTwoAggExpr() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > COUNT(*)"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", 
"avg_0: avg_0", "count_0: count_0"), - hasScript("params.avg_0 > count_0") - ) - )); - } - - @Test - public void nestedSingleCondition() { - assertThat( - query(NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING c > 30"), - contains( - bucketSelector( - hasBucketPath("c: income@NESTED.c"), - hasScript("params.c > 30") - ) - )); - } - - @Test - public void singleConditionWithOneFieldInSelect() { - assertThat( - query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), - contains( - bucketSelector( - hasBucketPath("c: c") - ) - )); - } - - @Test - public void singleConditionWithOneFieldInSelectWithHavingAgg() { - assertThat( - query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "avg_0: avg_0"), - hasScript("params.avg_0 > 30") - ) - )); - } - - @Test - public void singleConditionWithThreeFieldsInSelect() { - assertThat( - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "i: i") - ) - )); - } - - @Test - public void singleConditionWithThreeFieldsInSelectWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "i: i", "avg_0: avg_0"), - hasScript("params.avg_0 > 30") - ) - )); - } - - @Test - public void notEqualCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a <> 30"), - contains( - bucketSelector( - hasScript("params.a != 30") - ) - )); - } - - @Test - public void notEqualConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) <> 30"), - contains( - bucketSelector( - hasScript("params.avg_0 != 30") - ) - )); - } - - @Test - public void notCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (a > 30)"), - contains( - bucketSelector( - hasScript("params.a <= 30") - ) - )); - } - - @Test - public void notConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (AVG(a) > 30)"), - contains( - bucketSelector( - hasScript("params.avg_0 <= 30") - ) - )); - } - - @Test - public void andConditions() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 AND c <= 10"), - contains( - bucketSelector( - hasScript("params.a > 30 && params.c <= 10") - ) - )); - } - - @Test - public void andConditionsWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 AND SUM(c) <= 10"), - contains( - bucketSelector( - hasScript("params.avg_0 > 30 && params.sum_1 <= 10") - ) - )); - } - - @Test - public void orConditions() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 OR c <= 10"), - contains( - bucketSelector( - hasScript("params.a > 30 || params.c <= 10") - ) - )); - } - - @Test - public void orConditionsWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 OR SUM(c) <= 10"), - contains( - bucketSelector( - hasScript("params.avg_0 > 30 || params.sum_1 <= 10") - ) - )); - } - - @Test - public void betweenCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a BETWEEN 30 AND 50"), - contains( - bucketSelector( - hasScript("params.a >= 30 && params.a <= 50") - ) - )); - } - - @Test - public void betweenConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) BETWEEN 30 AND 50"), - 
contains( - bucketSelector( - hasScript("params.avg_0 >= 30 && params.avg_0 <= 50") - ) - )); - } - - @Test - public void notBetweenCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT BETWEEN 30 AND 50"), - contains( - bucketSelector( - hasScript("params.a < 30 || params.a > 50") - ) - )); - } - - @Test - public void notBetweenConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT BETWEEN 30 AND 50"), - contains( - bucketSelector( - hasScript("params.avg_0 < 30 || params.avg_0 > 50") - ) - )); - } - - @Test - public void inCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.a == 30 || params.a == 40 || params.a == 50") - ) - )); - } - - @Test - public void inConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.avg_0 == 30 || params.avg_0 == 40 || params.avg_0 == 50") - ) - )); - } - - @Test - public void notInCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.a != 30 && params.a != 40 && params.a != 50") - ) - )); - } - - @Test - public void notInConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.avg_0 != 30 && params.avg_0 != 40 && params.avg_0 != 50") - ) - )); - } - - @Test - public void nestedConditions() { - assertThat( - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING i <= 10000 OR NOT (a < 10 OR a > 30) AND c <= 10"), - contains( - bucketSelector( - hasScript("params.i <= 10000 || ((params.a >= 10 && params.a <= 30) && params.c <= 10)") - ) - )); - } - - @Test(expected = ParserException.class) - public void aggregationFunctionOnTheRight() { - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING 10 < a"); - } - - private Collection query(String sql) { - return translate(SqlParserUtils.parse(sql)); - } - - private Collection translate(SQLQueryExpr expr) { - try { - Select select = new SqlParser().parseSelect(expr); - select.getFields().forEach(field -> { + private static final String SELECT_CNT = "SELECT COUNT(*) as c "; + private static final String SELECT_CNT_AVG = "SELECT COUNT(*) as c, AVG(age) as a "; + private static final String SELECT_CNT_AVG_SUM = + "SELECT COUNT(*) as c, AVG(age) as a, SUM(income) as i "; + private static final String FROM_BANK = "FROM bank "; + private static final String GROUP_BY_AGE = "GROUP BY age "; + private static final String SELECT_CNT_FROM_BANK_GROUP_BY_AGE = + SELECT_CNT + FROM_BANK + GROUP_BY_AGE; + private static final String SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE = + SELECT_CNT_AVG + FROM_BANK + GROUP_BY_AGE; + private static final String SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE = + SELECT_CNT_AVG_SUM + FROM_BANK + GROUP_BY_AGE; + private static final String NESTED_SELECT_COUNT = "SELECT COUNT(nested(income, 'income')) as c "; + private static final String NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE = + NESTED_SELECT_COUNT + FROM_BANK + GROUP_BY_AGE; + + @Test + public void singleCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), + contains(bucketSelector(hasBucketPath("c: c", "a: a"), hasScript("params.a > 30")))); + } + + @Ignore + @Test + public void 
singleConditionWithTwoAggExpr() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > c"), + contains(bucketSelector(hasBucketPath("c: c", "a: a"), hasScript("params.a > params.c")))); + } + + @Test + public void singleConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > 30"), + contains( + bucketSelector( + hasBucketPath("c: c", "a: a", "avg_0: avg_0"), hasScript("params.avg_0 > 30")))); + } + + @Ignore + @Test + public void singleConditionWithHavingTwoAggExpr() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > COUNT(*)"), + contains( + bucketSelector( + hasBucketPath("c: c", "a: a", "avg_0: avg_0", "count_0: count_0"), + hasScript("params.avg_0 > count_0")))); + } + + @Test + public void nestedSingleCondition() { + assertThat( + query(NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING c > 30"), + contains(bucketSelector(hasBucketPath("c: income@NESTED.c"), hasScript("params.c > 30")))); + } + + @Test + public void singleConditionWithOneFieldInSelect() { + assertThat( + query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), + contains(bucketSelector(hasBucketPath("c: c")))); + } + + @Test + public void singleConditionWithOneFieldInSelectWithHavingAgg() { + assertThat( + query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), + contains( + bucketSelector(hasBucketPath("c: c", "avg_0: avg_0"), hasScript("params.avg_0 > 30")))); + } + + @Test + public void singleConditionWithThreeFieldsInSelect() { + assertThat( + query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), + contains(bucketSelector(hasBucketPath("c: c", "a: a", "i: i")))); + } + + @Test + public void singleConditionWithThreeFieldsInSelectWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), + contains( + bucketSelector( + hasBucketPath("c: c", "a: a", "i: i", "avg_0: avg_0"), + hasScript("params.avg_0 > 30")))); + } + + @Test + public void notEqualCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a <> 30"), + contains(bucketSelector(hasScript("params.a != 30")))); + } + + @Test + public void notEqualConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) <> 30"), + contains(bucketSelector(hasScript("params.avg_0 != 30")))); + } + + @Test + public void notCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (a > 30)"), + contains(bucketSelector(hasScript("params.a <= 30")))); + } + + @Test + public void notConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (AVG(a) > 30)"), + contains(bucketSelector(hasScript("params.avg_0 <= 30")))); + } + + @Test + public void andConditions() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 AND c <= 10"), + contains(bucketSelector(hasScript("params.a > 30 && params.c <= 10")))); + } + + @Test + public void andConditionsWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 AND SUM(c) <= 10"), + contains(bucketSelector(hasScript("params.avg_0 > 30 && params.sum_1 <= 10")))); + } + + @Test + public void orConditions() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 OR c <= 10"), + contains(bucketSelector(hasScript("params.a > 30 || params.c <= 10")))); + } + + @Test + public void orConditionsWithHavingAgg() { + assertThat( + 
query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 OR SUM(c) <= 10"), + contains(bucketSelector(hasScript("params.avg_0 > 30 || params.sum_1 <= 10")))); + } + + @Test + public void betweenCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.a >= 30 && params.a <= 50")))); + } + + @Test + public void betweenConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.avg_0 >= 30 && params.avg_0 <= 50")))); + } + + @Test + public void notBetweenCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.a < 30 || params.a > 50")))); + } + + @Test + public void notBetweenConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.avg_0 < 30 || params.avg_0 > 50")))); + } + + @Test + public void inCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a IN (30, 40, 50)"), + contains(bucketSelector(hasScript("params.a == 30 || params.a == 40 || params.a == 50")))); + } + + @Test + public void inConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) IN (30, 40, 50)"), + contains( + bucketSelector( + hasScript("params.avg_0 == 30 || params.avg_0 == 40 || params.avg_0 == 50")))); + } + + @Test + public void notInCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT IN (30, 40, 50)"), + contains(bucketSelector(hasScript("params.a != 30 && params.a != 40 && params.a != 50")))); + } + + @Test + public void notInConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT IN (30, 40, 50)"), + contains( + bucketSelector( + hasScript("params.avg_0 != 30 && params.avg_0 != 40 && params.avg_0 != 50")))); + } + + @Test + public void nestedConditions() { + assertThat( + query( + SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + + "HAVING i <= 10000 OR NOT (a < 10 OR a > 30) AND c <= 10"), + contains( + bucketSelector( + hasScript( + "params.i <= 10000 || ((params.a >= 10 && params.a <= 30) && params.c <=" + + " 10)")))); + } + + @Test(expected = ParserException.class) + public void aggregationFunctionOnTheRight() { + query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING 10 < a"); + } + + private Collection query(String sql) { + return translate(SqlParserUtils.parse(sql)); + } + + private Collection translate(SQLQueryExpr expr) { + try { + Select select = new SqlParser().parseSelect(expr); + select + .getFields() + .forEach( + field -> { try { - new AggMaker() - .withWhere(select.getWhere()) - .makeFieldAgg((MethodField) field, AggregationBuilders.terms("")); + new AggMaker() + .withWhere(select.getWhere()) + .makeFieldAgg((MethodField) field, AggregationBuilders.terms("")); } catch (SqlParseException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }); - AggregationBuilder agg = AggregationBuilders.terms(""); - select.getHaving().explain(agg, select.getFields()); - return agg.getPipelineAggregations(); - } catch (SqlParseException e) { - throw new ParserException("Illegal sql expr: " + expr.toString()); - } - } - - @SafeVarargs - private final Matcher bucketSelector(Matcher... 
matchers) { - return both(Matchers. // instanceOf() has type inference problem - instanceOf(BucketSelectorPipelineAggregationBuilder.class) - ). - and(allOf(matchers)); - } - - private Matcher hasBucketPath(String... expectedBucketPath) { - Map expectedMap = Arrays.stream(expectedBucketPath). - map(e -> e.split(":")). - collect(toMap(e -> e[0].trim(), e -> e[1].trim())); - return hasFieldWithValue("bucketsPathsMap", "has bucket path", is(expectedMap)); - } - - private Matcher hasScript(String expectedCode) { - return hasFieldWithValue("script", "has script", is(new Script(expectedCode))); - } + }); + AggregationBuilder agg = AggregationBuilders.terms(""); + select.getHaving().explain(agg, select.getFields()); + return agg.getPipelineAggregations(); + } catch (SqlParseException e) { + throw new ParserException("Illegal sql expr: " + expr.toString()); + } + } + + @SafeVarargs + private final Matcher bucketSelector( + Matcher... matchers) { + return both(Matchers + . // instanceOf() has type inference problem + instanceOf(BucketSelectorPipelineAggregationBuilder.class)) + .and(allOf(matchers)); + } + + private Matcher hasBucketPath(String... expectedBucketPath) { + Map expectedMap = + Arrays.stream(expectedBucketPath) + .map(e -> e.split(":")) + .collect(toMap(e -> e[0].trim(), e -> e[1].trim())); + return hasFieldWithValue("bucketsPathsMap", "has bucket path", is(expectedMap)); + } + + private Matcher hasScript(String expectedCode) { + return hasFieldWithValue("script", "has script", is(new Script(expectedCode))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java index f546f3571a..1e5082c0ba 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -52,360 +51,461 @@ @RunWith(MockitoJUnitRunner.Silent.class) public class JSONRequestTest { - @Mock - private ColumnTypeProvider columnTypeProvider; - - @Before - public void setup() { - when(columnTypeProvider.get(anyInt())).thenReturn(Schema.Type.DOUBLE); - } - - @Test - public void aggWithoutWhere() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString( - "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}")); - assertThat(explainSQL, containsString( - "\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); - } - - @Test - public void aggWithWhereOnParent() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' " + - "GROUP BY name " + - "HAVING c > 1"); - - assertThat(explainSQL, containsString( - "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); - assertThat(explainSQL, containsString( - "\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); - } - - @Test - public void aggWithWhereOnNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " 
+ - "HAVING c > 1"); - - assertThat(explainSQL, containsString("\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}}")); - assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggWithWhereOnParentOrNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString( - "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); - assertThat(explainSQL, containsString( - "\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); - } - - @Test - public void aggWithWhereOnParentAndNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString( - "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}")); - assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggWithWhereOnNestedAndNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 AND projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString("\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"must\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); - assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggWithWhereOnNestedOrNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 OR projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString("\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"should\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); - assertThat(explainSQL, 
containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggInHavingWithoutWhere() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); - } - - @Test - public void aggInHavingWithWhereOnParent() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + @Mock private ColumnTypeProvider columnTypeProvider; + + @Before + public void setup() { + when(columnTypeProvider.get(anyInt())).thenReturn(Schema.Type.DOUBLE); + } + + @Test + public void aggWithoutWhere() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + "GROUP BY name " + + "HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}")); + assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); + } + + @Test + public void aggWithWhereOnParent() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + "WHERE name LIKE '%smith%' " + + "GROUP BY name " + + "HAVING c > 1"); + + assertThat( + explainSQL, + containsString( + "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); + assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); + } + + @Test + public void aggWithWhereOnNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + "WHERE nested(projects.name, 'projects') LIKE '%security%' " + + "GROUP BY name " + + "HAVING c > 1"); + + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggWithWhereOnParentOrNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE name LIKE" + + " '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' GROUP BY name" + + " HAVING c > 1"); + assertThat( + explainSQL, + containsString( + 
"\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); + assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); + } + + @Test + public void aggWithWhereOnParentAndNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE name LIKE" + + " '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' GROUP BY name" + + " HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggWithWhereOnNestedAndNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE" + + " nested('projects', projects.started_year > 2000 AND projects.name LIKE" + + " '%security%') GROUP BY name HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"must\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggWithWhereOnNestedOrNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE" + + " nested('projects', projects.started_year > 2000 OR projects.name LIKE" + + " '%security%') GROUP BY name HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"should\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggInHavingWithoutWhere() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name " + + "FROM employee " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test + public void aggInHavingWithWhereOnParent() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name " + + "FROM employee " + + "WHERE name LIKE '%smith%' " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1"); + assertThat( + query( + explainSQL, + 
"/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test + public void aggInHavingWithWhereOnNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name " + + "FROM employee " + + "WHERE nested(projects.name, 'projects') LIKE '%security%' " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); + } + + @Test + public void aggInHavingWithWhereOnParentOrNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE name LIKE '%smith%' OR nested(projects.name," + + " 'projects') LIKE '%security%' GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test + public void aggInHavingWithWhereOnParentAndNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE name LIKE '%smith%' AND nested(projects.name," + + " 'projects') LIKE '%security%' GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); + } + + @Test + public void aggInHavingWithWhereOnNestedAndNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE nested('projects', projects.started_year > 2000 AND" + + " projects.name LIKE '%security%') GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + 
"/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo( + "[{\"bool\":{\"adjust_pure_negative\":true,\"must\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); + } + + @Test + public void aggInHavingWithWhereOnNestedOrNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE nested('projects', projects.started_year > 2000 OR" + + " projects.name LIKE '%security%') GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo( + "[{\"bool\":{\"adjust_pure_negative\":true,\"should\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); + } + + @Test + public void searchSanity() throws IOException { + String result = + explain( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE firstname LIKE 'A%%' AND age > 20 " + + "GROUP BY gender " + + "ORDER BY _score\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + "src/test/resources/expectedOutput/search_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); + } + + // This test was ignored because group by case function is not supported + @Ignore + @Test + public void aggregationQuery() throws IOException { + String result = + explain( + String.format( + "{\"query\":\"SELECT address, CASE WHEN gender='0' THEN 'aaa' ELSE 'bbb' END AS" + + " a2345, count(age) FROM %s GROUP BY" + + " terms('field'='address','execution_hint'='global_ordinals'), a2345\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + + "src/test/resources/expectedOutput/aggregation_query_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); + } + + @Test + public void deleteSanity() throws IOException { + try (MockedStatic localClusterStateMockedStatic = + Mockito.mockStatic(LocalClusterState.class)) { + LocalClusterState state = mock(LocalClusterState.class); + localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); + when(state.getSettingValue(any(Settings.Key.class))).thenReturn(true); + + String result = + explain( + String.format( + "{\"query\":\"" + + "DELETE " + + "FROM %s " + + "WHERE firstname LIKE 'A%%' AND age > 20\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + "src/test/resources/expectedOutput/delete_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + 
assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); } - - @Test - public void aggInHavingWithWhereOnNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); - } - - @Test - public void aggInHavingWithWhereOnParentOrNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test(expected = SQLFeatureDisabledException.class) + public void deleteShouldThrowExceptionWhenDisabled() + throws SQLFeatureDisabledException, SQLFeatureNotSupportedException, SqlParseException { + try (MockedStatic localClusterStateMockedStatic = + Mockito.mockStatic(LocalClusterState.class)) { + LocalClusterState state = mock(LocalClusterState.class); + localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); + when(state.getSettingValue(any(Settings.Key.class))).thenReturn(false); + + JSONObject jsonRequest = + new JSONObject( + StringUtils.format( + "{\"query\":\"" + + "DELETE " + + "FROM %s " + + "WHERE firstname LIKE 'A%%' AND age > 20\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + translate(jsonRequest.getString("query"), jsonRequest); } - - @Test - public void aggInHavingWithWhereOnParentAndNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); + } + + @Test + public void queryFilter() throws IOException { + /* + * Human-readable format of the request defined below: + * { + * "query": "SELECT * FROM accounts WHERE age > 25", + * "filter": { + * "range": { + * "balance": { + * "lte": 30000 + * } + * } + * } + * 
} + */ + String result = + explain( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE age > 25\"," + + "\"filter\":{\"range\":{\"balance\":{\"lte\":30000}}}}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + + "src/test/resources/expectedOutput/json_filter_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); + } + + private String removeSpaces(String s) { + return s.replaceAll("\\s+", ""); + } + + private String explainSQL(String sql) { + return explain(String.format("{\"query\":\"%s\"}", sql)); + } + + private JSONObject explainSQLToJson(String sql) { + return new JSONObject(explain(String.format("{\"query\":\"%s\"}", sql))); + } + + private String query(JSONObject jsonObject, String jsonPath) { + return jsonObject.query(jsonPath).toString(); + } + + private String explain(String request) { + try { + JSONObject jsonRequest = new JSONObject(request); + String sql = jsonRequest.getString("query"); + + return translate(sql, jsonRequest); + } catch (SqlParseException | SQLFeatureNotSupportedException | SQLFeatureDisabledException e) { + throw new ParserException("Illegal sql expr in request: " + request); } - - @Test - public void aggInHavingWithWhereOnNestedAndNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 AND projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"bool\":{\"adjust_pure_negative\":true,\"must\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); - } - - @Test - public void aggInHavingWithWhereOnNestedOrNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 OR projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, - "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, - "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"bool\":{\"adjust_pure_negative\":true,\"should\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); - } - - @Test - 
public void searchSanity() throws IOException { - String result = explain(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE firstname LIKE 'A%%' AND age > 20 " + - "GROUP BY gender " + - "ORDER BY _score\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/search_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - - // This test was ignored because group by case function is not supported - @Ignore - @Test - public void aggregationQuery() throws IOException { - String result = explain(String.format("{\"query\":\"" + - "SELECT address, CASE WHEN gender='0' THEN 'aaa' ELSE 'bbb' END AS a2345, count(age) " + - "FROM %s " + - "GROUP BY terms('field'='address','execution_hint'='global_ordinals'), a2345\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/aggregation_query_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - - @Test - public void deleteSanity() throws IOException { - try (MockedStatic localClusterStateMockedStatic = - Mockito.mockStatic(LocalClusterState.class)) { - LocalClusterState state = mock(LocalClusterState.class); - localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); - when(state.getSettingValue(any(Settings.Key.class))).thenReturn(true); - - String result = explain(String.format("{\"query\":\"" + - "DELETE " + - "FROM %s " + - "WHERE firstname LIKE 'A%%' AND age > 20\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/delete_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - } - - @Test(expected = SQLFeatureDisabledException.class) - public void deleteShouldThrowExceptionWhenDisabled() - throws SQLFeatureDisabledException, SQLFeatureNotSupportedException, - SqlParseException { - try (MockedStatic localClusterStateMockedStatic = - Mockito.mockStatic(LocalClusterState.class)) { - LocalClusterState state = mock(LocalClusterState.class); - localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); - when(state.getSettingValue(any(Settings.Key.class))).thenReturn(false); - - JSONObject jsonRequest = new JSONObject(StringUtils.format("{\"query\":\"" + - "DELETE " + - "FROM %s " + - "WHERE firstname LIKE 'A%%' AND age > 20\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - translate(jsonRequest.getString("query"), jsonRequest); - } - } - - @Test - public void queryFilter() throws IOException { - /* - * Human readable format of the request defined below: - * { - * "query": "SELECT * FROM accounts WHERE age > 25", - * "filter": { - * "range": { - * "balance": { - * "lte": 30000 - * } - * } - * } - * } - */ - String result = explain(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE age > 25\"," + - "\"filter\":{\"range\":{\"balance\":{\"lte\":30000}}}}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/json_filter_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - assertThat(removeSpaces(result), 
equalTo(removeSpaces(expectedOutput))); - } - - private String removeSpaces(String s) { - return s.replaceAll("\\s+", ""); - } - - private String explainSQL(String sql) { - return explain(String.format("{\"query\":\"%s\"}", sql)); - } - - private JSONObject explainSQLToJson(String sql) { - return new JSONObject(explain(String.format("{\"query\":\"%s\"}", sql))); - } - - private String query(JSONObject jsonObject, String jsonPath) { - return jsonObject.query(jsonPath).toString(); - } - - private String explain(String request) { - try { - JSONObject jsonRequest = new JSONObject(request); - String sql = jsonRequest.getString("query"); - - return translate(sql, jsonRequest); - } catch (SqlParseException | SQLFeatureNotSupportedException | SQLFeatureDisabledException e) { - throw new ParserException("Illegal sql expr in request: " + request); - } - } - - private String translate(String sql, JSONObject jsonRequest) - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - Client mockClient = mock(Client.class); - CheckScriptContents.stubMockClient(mockClient); - QueryAction queryAction = - OpenSearchActionFactory - .create(mockClient, new QueryActionRequest(sql, columnTypeProvider, Format.JDBC)); - - SqlRequest sqlRequest = new SqlRequest(sql, jsonRequest); - queryAction.setSqlRequest(sqlRequest); - - SqlElasticRequestBuilder requestBuilder = queryAction.explain(); - return requestBuilder.explain(); - } - - private String getResourcePath() { - String projectRoot = System.getProperty("project.root"); - if ( projectRoot!= null && projectRoot.trim().length() > 0) { - return projectRoot.trim() + "/"; - } else { - return ""; - } + } + + private String translate(String sql, JSONObject jsonRequest) + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + Client mockClient = mock(Client.class); + CheckScriptContents.stubMockClient(mockClient); + QueryAction queryAction = + OpenSearchActionFactory.create( + mockClient, new QueryActionRequest(sql, columnTypeProvider, Format.JDBC)); + + SqlRequest sqlRequest = new SqlRequest(sql, jsonRequest); + queryAction.setSqlRequest(sqlRequest); + + SqlElasticRequestBuilder requestBuilder = queryAction.explain(); + return requestBuilder.explain(); + } + + private String getResourcePath() { + String projectRoot = System.getProperty("project.root"); + if (projectRoot != null && projectRoot.trim().length() > 0) { + return projectRoot.trim() + "/"; + } else { + return ""; } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java index 00a39ce0d3..6c38af05af 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -37,153 +34,161 @@ import org.opensearch.sql.legacy.util.TestsConstants; import 
org.opensearch.sql.opensearch.setting.OpenSearchSettings; -/** - * Local cluster state testing without covering OpenSearch logic, ex. resolve index pattern. - */ +/** Local cluster state testing without covering OpenSearch logic, ex. resolve index pattern. */ public class LocalClusterStateTest { - private static final String INDEX_NAME = TestsConstants.TEST_INDEX_BANK; - private static final String TYPE_NAME = "account"; - - private static final String MAPPING = "{\n" + - " \"opensearch-sql_test_index_bank\": {\n" + - " \"mappings\": {\n" + - " \"account\": {\n" + - " \"properties\": {\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"raw\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"manager\": {\n" + - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - // ==== All required by IndexMetaData.fromXContent() ==== - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 5,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\"\n" + - //======================================================= - " }\n" + - "}"; - - @Mock private ClusterSettings clusterSettings; - - @Before - public void init() { - MockitoAnnotations.openMocks(this); - LocalClusterState.state(null); - mockLocalClusterState(MAPPING); - } - - @Test - public void getMappingForExistingField() { - IndexMappings indexMappings = LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - Assert.assertNotNull(indexMappings); - - FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); - Assert.assertNotNull(fieldMappings); - - Assert.assertEquals("text", fieldMappings.mapping("address").get("type")); - Assert.assertEquals("integer", fieldMappings.mapping("age").get("type")); - Assert.assertEquals("keyword", fieldMappings.mapping("city").get("type")); - Assert.assertEquals("text", fieldMappings.mapping("employer").get("type")); - - Assert.assertEquals("text", fieldMappings.mapping("manager.name").get("type")); - Assert.assertEquals("keyword", fieldMappings.mapping("manager.address").get("type")); + private static final String INDEX_NAME = TestsConstants.TEST_INDEX_BANK; + private static final String TYPE_NAME = "account"; + + private static final String MAPPING = + "{\n" + + " \"opensearch-sql_test_index_bank\": {\n" + + " \"mappings\": {\n" + + " \"account\": {\n" + + " \"properties\": {\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " 
\"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"raw\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"manager\": {\n" + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + + // ==== All required by IndexMetaData.fromXContent() ==== + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 5,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\",\n" + + " \"aliases_version\": \"1\"\n" + + + // ======================================================= + " }\n" + + "}"; + + @Mock private ClusterSettings clusterSettings; + + @Before + public void init() { + MockitoAnnotations.openMocks(this); + LocalClusterState.state(null); + mockLocalClusterState(MAPPING); + } + + @Test + public void getMappingForExistingField() { + IndexMappings indexMappings = + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); + Assert.assertNotNull(indexMappings); + + FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); + Assert.assertNotNull(fieldMappings); + + Assert.assertEquals("text", fieldMappings.mapping("address").get("type")); + Assert.assertEquals("integer", fieldMappings.mapping("age").get("type")); + Assert.assertEquals("keyword", fieldMappings.mapping("city").get("type")); + Assert.assertEquals("text", fieldMappings.mapping("employer").get("type")); + + Assert.assertEquals("text", fieldMappings.mapping("manager.name").get("type")); + Assert.assertEquals("keyword", fieldMappings.mapping("manager.address").get("type")); + } + + @Test + public void getMappingForInvalidField() { + IndexMappings indexMappings = + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); + FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); + + Assert.assertNull(fieldMappings.mapping("work-email")); + Assert.assertNull(fieldMappings.mapping("manager.home-address")); + Assert.assertNull(fieldMappings.mapping("manager.name.first")); + Assert.assertNull(fieldMappings.mapping("manager.name.first.uppercase")); + } + + @Test + public void getMappingFromCache() throws IOException { + // Mock here again for verification below and mock addListener() + ClusterService mockService = mockClusterService(MAPPING); + ClusterStateListener[] listener = new ClusterStateListener[1]; // Trick to access inside lambda + doAnswer( + invocation -> { + listener[0] = (ClusterStateListener) invocation.getArguments()[0]; + return null; + }) + .when(mockService) + .addListener(any()); + LocalClusterState.state().setClusterService(mockService); + + // 1.Actual findMappings be invoked only once + for (int i = 0; i < 10; i++) { + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); } - - @Test - public void getMappingForInvalidField() { - IndexMappings indexMappings = LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); - - 
Assert.assertNull(fieldMappings.mapping("work-email")); - Assert.assertNull(fieldMappings.mapping("manager.home-address")); - Assert.assertNull(fieldMappings.mapping("manager.name.first")); - Assert.assertNull(fieldMappings.mapping("manager.name.first.uppercase")); - } - - @Test - public void getMappingFromCache() throws IOException { - // Mock here again for verification below and mock addListener() - ClusterService mockService = mockClusterService(MAPPING); - ClusterStateListener[] listener = new ClusterStateListener[1]; // Trick to access inside lambda - doAnswer(invocation -> { - listener[0] = (ClusterStateListener) invocation.getArguments()[0]; - return null; - }).when(mockService).addListener(any()); - LocalClusterState.state().setClusterService(mockService); - - // 1.Actual findMappings be invoked only once - for (int i = 0; i < 10; i++) { - LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - } - verify(mockService.state().metadata(), times(1)).findMappings(eq(new String[]{INDEX_NAME}), any()); - - // 2.Fire cluster state change event - Assert.assertNotNull(listener[0]); - ClusterChangedEvent mockEvent = mock(ClusterChangedEvent.class); - when(mockEvent.metadataChanged()).thenReturn(true); - listener[0].clusterChanged(mockEvent); - - // 3.Cache should be invalidated and call findMapping another time only - for (int i = 0; i < 5; i++) { - LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - } - verify(mockService.state().metadata(), times(2)).findMappings(eq(new String[]{INDEX_NAME}), any()); + verify(mockService.state().metadata(), times(1)) + .findMappings(eq(new String[] {INDEX_NAME}), any()); + + // 2.Fire cluster state change event + Assert.assertNotNull(listener[0]); + ClusterChangedEvent mockEvent = mock(ClusterChangedEvent.class); + when(mockEvent.metadataChanged()).thenReturn(true); + listener[0].clusterChanged(mockEvent); + + // 3.Cache should be invalidated and call findMapping another time only + for (int i = 0; i < 5; i++) { + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); } - - @Test - public void getDefaultValueForQuerySlowLog() { - when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); - assertEquals(Integer.valueOf(2), settings.getSettingValue(Settings.Key.SQL_SLOWLOG)); - } - + verify(mockService.state().metadata(), times(2)) + .findMappings(eq(new String[] {INDEX_NAME}), any()); + } + + @Test + public void getDefaultValueForQuerySlowLog() { + when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); + OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); + assertEquals(Integer.valueOf(2), settings.getSettingValue(Settings.Key.SQL_SLOWLOG)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java index b52dd3efc6..e62060c574 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertTrue; @@ -17,437 +16,291 @@ public class MathFunctionsTest { - private static SqlParser parser; - - @BeforeClass - public static void init() { parser = new SqlParser(); } - - /** Tests for 
case insensitivity when calling SQL functions */ - @Test - public void lowerCaseInSelect() { - String query = "SELECT abs(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.abs(doc['age'].value)")); - } - - @Test - public void upperCaseInSelect() { - String query = "SELECT ABS(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.abs(doc['age'].value)")); - } - - @Test - public void lowerCaseInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE sqrt(age) > 5"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.sqrt(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "sqrt_\\d+ > 5")); - } - - @Test - public void upperCaseInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE SQRT(age) > 5"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.sqrt(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "sqrt_\\d+ > 5")); - } - - /** Tests for constant functions */ - @Test - public void eulersNumberInSelect() { - String query = "SELECT E() " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.E")); - } - - @Test - public void eulersNumberInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE E() > 2"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.E")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "E_\\d+ > 2")); - } - - @Test - public void piInSelect() { - String query = "SELECT PI() " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.PI")); - } - - @Test - public void piInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE PI() < 4"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.PI")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "PI_\\d+ < 4")); - } - - /** Tests for general math functions */ - @Test - public void expm1WithPropertyArgument() { - String query = "SELECT * " + - "FROM bank " + - "WHERE expm1(age) > 10"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.expm1(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "expm1_\\d+ > 10")); - } - - @Test - public void expm1WithValueArgument() { - String query = "SELECT * " + - "FROM bank " + - "WHERE expm1(5) > 10"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - 
"Math.expm1(5)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "expm1_\\d+ > 10")); - } - - - /** Tests for trigonometric functions */ - @Test - public void degreesWithPropertyArgument() { - String query = "SELECT degrees(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toDegrees(doc['age'].value)")); - } - - @Test - public void degreesWithValueArgument() { - String query = "SELECT degrees(10) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toDegrees(10)")); - } - - @Test - public void radiansWithPropertyArgument() { - String query = "SELECT radians(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toRadians(doc['age'].value)")); - } - - @Test - public void radiansWithValueArgument() { - String query = "SELECT radians(180) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toRadians(180)")); - } - - @Test - public void sinWithPropertyArgument() { - String query = "SELECT sin(radians(age)) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toRadians(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "Math.sin\\(radians_\\d+\\)")); - } - - @Test - public void sinWithValueArgument() { - String query = "SELECT sin(radians(180)) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toRadians(180)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "Math.sin\\(radians_\\d+\\)")); - } - - @Test - public void atanWithPropertyArgument() { - String query = "SELECT atan(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.atan(doc['age'].value)")); - } - - @Test - public void atanWithValueArgument() { - String query = "SELECT atan(1) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.atan(1)")); - } - - @Test - public void atanWithFunctionArgument() { - String query = "SELECT atan(PI() / 2) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.PI")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "PI_\\d+ / 2")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "Math.atan\\(divide_\\d+\\)")); - } - - @Test - public void coshWithPropertyArgument() { - String query = "SELECT cosh(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.cosh(doc['age'].value)")); - } - - @Test - public void 
coshWithValueArgument() { - String query = "SELECT cosh(0) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.cosh(0)")); - } - - @Test - public void powerWithPropertyArgument() { - String query = "SELECT POWER(age, 2) FROM bank WHERE POWER(balance, 3) > 0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.pow(doc['age'].value, 2)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.pow(doc['balance'].value, 3)")); - } - - @Test - public void atan2WithPropertyArgument() { - String query = "SELECT ATAN2(age, 2) FROM bank WHERE ATAN2(balance, 3) > 0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.atan2(doc['age'].value, 2)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.atan2(doc['balance'].value, 3)")); - } - - @Test - public void cotWithPropertyArgument() { - String query = "SELECT COT(age) FROM bank WHERE COT(balance) > 0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "1 / Math.tan(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "1 / Math.tan(doc['balance'].value)")); - } - - @Test - public void signWithFunctionPropertyArgument() { - String query = "SELECT SIGN(age) FROM bank WHERE SIGNUM(balance) = 1"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue(CheckScriptContents.scriptContainsString( - scriptField, - "Math.signum(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.signum(doc['balance'].value)")); - } - - @Test - public void logWithOneParam() { - String query = "SELECT LOG(age) FROM bank WHERE LOG(age) = 5.0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.log(doc['age'].value)")); - } - - @Test - public void logWithTwoParams() { - String query = "SELECT LOG(3, age) FROM bank WHERE LOG(3, age) = 5.0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log(doc['age'].value)/Math.log(3)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.log(doc['age'].value)/Math.log(3)")); - } - - @Test - public void log10Test() { - String query = "SELECT LOG10(age) FROM accounts"; - ScriptField scriptField = 
CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log10(doc['age'].value)" - ) - ); - } - - @Test - public void lnTest() { - String query = "SELECT LN(age) FROM age WHERE LN(age) = 5.0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.log(doc['age'].value)")); - } - - @Test - public void randWithoutParamTest() { - String query = "SELECT RAND() FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "new Random().nextDouble()" - ) - ); - } - - @Test - public void randWithOneParamTest() { - String query = "SELECT RAND(age) FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "new Random(doc['age'].value).nextDouble()" - ) - ); - } + private static SqlParser parser; + + @BeforeClass + public static void init() { + parser = new SqlParser(); + } + + /** Tests for case insensitivity when calling SQL functions */ + @Test + public void lowerCaseInSelect() { + String query = "SELECT abs(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.abs(doc['age'].value)")); + } + + @Test + public void upperCaseInSelect() { + String query = "SELECT ABS(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.abs(doc['age'].value)")); + } + + @Test + public void lowerCaseInWhere() { + String query = "SELECT * " + "FROM bank WHERE sqrt(age) > 5"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.sqrt(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "sqrt_\\d+ > 5")); + } + + @Test + public void upperCaseInWhere() { + String query = "SELECT * " + "FROM bank WHERE SQRT(age) > 5"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.sqrt(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "sqrt_\\d+ > 5")); + } + + /** Tests for constant functions */ + @Test + public void eulersNumberInSelect() { + String query = "SELECT E() FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.E")); + } + + @Test + public void eulersNumberInWhere() { + String query = "SELECT * " + "FROM bank WHERE E() > 2"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue(CheckScriptContents.scriptContainsString(scriptFilter, "Math.E")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "E_\\d+ > 2")); + } + + @Test + public void piInSelect() { + String query = "SELECT PI() FROM bank"; + ScriptField scriptField = 
CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.PI")); + } + + @Test + public void piInWhere() { + String query = "SELECT * FROM bank WHERE PI() < 4"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue(CheckScriptContents.scriptContainsString(scriptFilter, "Math.PI")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "PI_\\d+ < 4")); + } + + /** Tests for general math functions */ + @Test + public void expm1WithPropertyArgument() { + String query = "SELECT * FROM bank WHERE expm1(age) > 10"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.expm1(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "expm1_\\d+ > 10")); + } + + @Test + public void expm1WithValueArgument() { + String query = "SELECT * FROM bank WHERE expm1(5) > 10"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue(CheckScriptContents.scriptContainsString(scriptFilter, "Math.expm1(5)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "expm1_\\d+ > 10")); + } + + /** Tests for trigonometric functions */ + @Test + public void degreesWithPropertyArgument() { + String query = "SELECT degrees(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.toDegrees(doc['age'].value)")); + } + + @Test + public void degreesWithValueArgument() { + String query = "SELECT degrees(10) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.toDegrees(10)")); + } + + @Test + public void radiansWithPropertyArgument() { + String query = "SELECT radians(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(doc['age'].value)")); + } + + @Test + public void radiansWithValueArgument() { + String query = "SELECT radians(180) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(180)")); + } + + @Test + public void sinWithPropertyArgument() { + String query = "SELECT sin(radians(age)) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "Math.sin\\(radians_\\d+\\)")); + } + + @Test + public void sinWithValueArgument() { + String query = "SELECT sin(radians(180)) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(180)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "Math.sin\\(radians_\\d+\\)")); + } + + @Test + public void atanWithPropertyArgument() { + String query = "SELECT atan(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.atan(doc['age'].value)")); + } 
+
+  @Test
+  public void atanWithValueArgument() {
+    String query = "SELECT atan(1) FROM bank";
+    ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query);
+    assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.atan(1)"));
+  }
+
+  @Test
+  public void atanWithFunctionArgument() {
+    String query = "SELECT atan(PI() / 2) FROM bank";
+    ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query);
+    assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.PI"));
+    assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "PI_\\d+ / 2"));
+    assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "Math.atan\\(divide_\\d+\\)"));
+  }
+
+  @Test
+  public void coshWithPropertyArgument() {
+    String query = "SELECT cosh(age) FROM bank";
+    ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query);
+    assertTrue(
+        CheckScriptContents.scriptContainsString(scriptField, "Math.cosh(doc['age'].value)"));
+  }
+
+  @Test
+  public void coshWithValueArgument() {
+    String query = "SELECT cosh(0) FROM bank";
+    ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query);
+    assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.cosh(0)"));
+  }
+
+  @Test
+  public void powerWithPropertyArgument() {
+    String query = "SELECT POWER(age, 2) FROM bank WHERE POWER(balance, 3) > 0";
+    ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query);
+    assertTrue(
+        CheckScriptContents.scriptContainsString(scriptField, "Math.pow(doc['age'].value, 2)"));
+
+    ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser);
+    assertTrue(
+        CheckScriptContents.scriptContainsString(
+            scriptFilter, "Math.pow(doc['balance'].value, 3)"));
+  }
+
+  @Test
+  public void atan2WithPropertyArgument() {
+    String query = "SELECT ATAN2(age, 2) FROM bank WHERE ATAN2(balance, 3) > 0";
+    ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query);
+    assertTrue(
+        CheckScriptContents.scriptContainsString(scriptField, "Math.atan2(doc['age'].value, 2)"));
+
+    ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser);
+    assertTrue(
+        CheckScriptContents.scriptContainsString(
+            scriptFilter, "Math.atan2(doc['balance'].value, 3)"));
+  }
+
+  @Test
+  public void cotWithPropertyArgument() {
+    String query = "SELECT COT(age) FROM bank WHERE COT(balance) > 0";
+    ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query);
+    assertTrue(
+        CheckScriptContents.scriptContainsString(scriptField, "1 / Math.tan(doc['age'].value)"));
+
+    ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser);
+    assertTrue(
+        CheckScriptContents.scriptContainsString(
+            scriptFilter, "1 / Math.tan(doc['balance'].value)"));
+  }
+
+  @Test
+  public void signWithFunctionPropertyArgument() {
+    String query = "SELECT SIGN(age) FROM bank WHERE SIGNUM(balance) = 1";
+    ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query);
+    assertTrue(
+        CheckScriptContents.scriptContainsString(scriptField, "Math.signum(doc['age'].value)"));
+
+    ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser);
+    assertTrue(
+        CheckScriptContents.scriptContainsString(
+            scriptFilter, "Math.signum(doc['balance'].value)"));
+  }
+
+  @Test
+  public void logWithOneParam() {
+    String query = "SELECT LOG(age) FROM bank WHERE LOG(age) = 5.0";
+    ScriptField scriptField =
CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.log(doc['age'].value)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.log(doc['age'].value)")); + } + + @Test + public void logWithTwoParams() { + String query = "SELECT LOG(3, age) FROM bank WHERE LOG(3, age) = 5.0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "Math.log(doc['age'].value)/Math.log(3)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Math.log(doc['age'].value)/Math.log(3)")); + } + + @Test + public void log10Test() { + String query = "SELECT LOG10(age) FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.log10(doc['age'].value)")); + } + + @Test + public void lnTest() { + String query = "SELECT LN(age) FROM age WHERE LN(age) = 5.0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.log(doc['age'].value)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.log(doc['age'].value)")); + } + + @Test + public void randWithoutParamTest() { + String query = "SELECT RAND() FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "new Random().nextDouble()")); + } + + @Test + public void randWithOneParamTest() { + String query = "SELECT RAND(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "new Random(doc['age'].value).nextDouble()")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java index 2160affda0..34c9b941d5 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor.join; import java.io.IOException; @@ -23,30 +22,28 @@ @RunWith(MockitoJUnitRunner.class) public class ElasticUtilsTest { - @Mock - MetaSearchResult metaSearchResult; - - /** - * test handling {@link TotalHits} correctly. - */ - @Test - public void hitsAsStringResult() throws IOException { - final SearchHits searchHits = new SearchHits(new SearchHit[]{}, new TotalHits(1, Relation.EQUAL_TO), 0); - final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); - - Assert.assertEquals(1, new JSONObject(result).query("/hits/total/value")); - Assert.assertEquals(Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); - } - - /** - * test handling {@link TotalHits} with null value correctly. 
- */ - @Test - public void test_hitsAsStringResult_withNullTotalHits() throws IOException { - final SearchHits searchHits = new SearchHits(new SearchHit[]{}, null, 0); - final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); - - Assert.assertEquals(0, new JSONObject(result).query("/hits/total/value")); - Assert.assertEquals(Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); - } + @Mock MetaSearchResult metaSearchResult; + + /** test handling {@link TotalHits} correctly. */ + @Test + public void hitsAsStringResult() throws IOException { + final SearchHits searchHits = + new SearchHits(new SearchHit[] {}, new TotalHits(1, Relation.EQUAL_TO), 0); + final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); + + Assert.assertEquals(1, new JSONObject(result).query("/hits/total/value")); + Assert.assertEquals( + Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); + } + + /** test handling {@link TotalHits} with null value correctly. */ + @Test + public void test_hitsAsStringResult_withNullTotalHits() throws IOException { + final SearchHits searchHits = new SearchHits(new SearchHit[] {}, null, 0); + final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); + + Assert.assertEquals(0, new JSONObject(result).query("/hits/total/value")); + Assert.assertEquals( + Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java index a6b736eca1..08bac51d77 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getNumberValue; @@ -15,35 +14,35 @@ import org.opensearch.sql.legacy.expression.core.operator.ScalarOperation; import org.opensearch.sql.legacy.expression.domain.BindingTuple; - public class ExpressionTest { - protected BindingTuple bindingTuple() { - String json = "{\n" + - " \"intValue\": 1,\n" + - " \"intValue2\": 2,\n" + - " \"doubleValue\": 2.0,\n" + - " \"negDoubleValue\": -2.0,\n" + - " \"stringValue\": \"string\",\n" + - " \"booleanValue\": true,\n" + - " \"tupleValue\": {\n" + - " \"intValue\": 1,\n" + - " \"doubleValue\": 2.0,\n" + - " \"stringValue\": \"string\"\n" + - " },\n" + - " \"collectValue\": [\n" + - " 1,\n" + - " 2,\n" + - " 3\n" + - " ]\n" + - "}"; - return BindingTuple.from(new JSONObject(json)); - } - - protected Expression of(ScalarOperation op, Expression... expressions) { - return ExpressionFactory.of(op, Arrays.asList(expressions)); - } - - protected Number apply(ScalarOperation op, Expression... 
expressions) { - return getNumberValue(of(op, expressions).valueOf(bindingTuple())); - } + protected BindingTuple bindingTuple() { + String json = + "{\n" + + " \"intValue\": 1,\n" + + " \"intValue2\": 2,\n" + + " \"doubleValue\": 2.0,\n" + + " \"negDoubleValue\": -2.0,\n" + + " \"stringValue\": \"string\",\n" + + " \"booleanValue\": true,\n" + + " \"tupleValue\": {\n" + + " \"intValue\": 1,\n" + + " \"doubleValue\": 2.0,\n" + + " \"stringValue\": \"string\"\n" + + " },\n" + + " \"collectValue\": [\n" + + " 1,\n" + + " 2,\n" + + " 3\n" + + " ]\n" + + "}"; + return BindingTuple.from(new JSONObject(json)); + } + + protected Expression of(ScalarOperation op, Expression... expressions) { + return ExpressionFactory.of(op, Arrays.asList(expressions)); + } + + protected Number apply(ScalarOperation op, Expression... expressions) { + return getNumberValue(of(op, expressions).valueOf(bindingTuple())); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java index 150afcacd3..ceda04f1b5 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.model; import static org.hamcrest.Matchers.equalTo; @@ -19,42 +18,41 @@ @RunWith(MockitoJUnitRunner.class) public class ExprValueUtilsTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void getIntegerValueWithIntegerExprValueShouldPass() { - assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.integerValue(1)), equalTo(1)); - } - - @Test - public void getDoubleValueWithIntegerExprValueShouldPass() { - assertThat(ExprValueUtils.getDoubleValue(ExprValueFactory.integerValue(1)), equalTo(1d)); - } - - @Test - public void getIntegerWithDoubleExprValueShouldPass() { - assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.doubleValue(1d)), equalTo(1)); - } - - @Test - public void getLongValueFromLongExprValueShouldPass() { - assertThat(ExprValueUtils.getLongValue(ExprValueFactory.from(1L)), equalTo(1L)); - } - - @Test - public void getIntegerValueFromStringExprValueShouldThrowException() { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("invalid to get NUMBER_VALUE from expr type of STRING_VALUE"); - - ExprValueUtils.getIntegerValue(ExprValueFactory.stringValue("string")); - } - - @Test - public void getStringValueFromIntegerExprValueShouldThrowException() { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("invalid to get STRING_VALUE from expr type of INTEGER_VALUE"); - - ExprValueUtils.getStringValue(ExprValueFactory.integerValue(1)); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void getIntegerValueWithIntegerExprValueShouldPass() { + assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.integerValue(1)), equalTo(1)); + } + + @Test + public void getDoubleValueWithIntegerExprValueShouldPass() { + assertThat(ExprValueUtils.getDoubleValue(ExprValueFactory.integerValue(1)), equalTo(1d)); + } + + @Test + public void getIntegerWithDoubleExprValueShouldPass() { + assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.doubleValue(1d)), equalTo(1)); + } + + @Test + 
public void getLongValueFromLongExprValueShouldPass() {
+    assertThat(ExprValueUtils.getLongValue(ExprValueFactory.from(1L)), equalTo(1L));
+  }
+
+  @Test
+  public void getIntegerValueFromStringExprValueShouldThrowException() {
+    exceptionRule.expect(IllegalStateException.class);
+    exceptionRule.expectMessage("invalid to get NUMBER_VALUE from expr type of STRING_VALUE");
+
+    ExprValueUtils.getIntegerValue(ExprValueFactory.stringValue("string"));
+  }
+
+  @Test
+  public void getStringValueFromIntegerExprValueShouldThrowException() {
+    exceptionRule.expect(IllegalStateException.class);
+    exceptionRule.expectMessage("invalid to get STRING_VALUE from expr type of INTEGER_VALUE");
+
+    ExprValueUtils.getStringValue(ExprValueFactory.integerValue(1));
+  }
 }
diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java
index a818a115fd..1ec499ce9b 100644
--- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java
+++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
-
 package org.opensearch.sql.legacy.unittest.metrics;

 import static org.hamcrest.MatcherAssert.assertThat;
@@ -14,19 +13,17 @@
 public class GaugeMetricTest {
-    private static long x = 0;
-
-    @Test
-    public void getValue() {
-        GaugeMetric gaugeMetric = new GaugeMetric<>("test", this::getSeq);
-
-        assertThat(gaugeMetric.getValue(), equalTo(1L));
-        assertThat(gaugeMetric.getValue(), equalTo(2L));
+  private static long x = 0;
-    }
+  @Test
+  public void getValue() {
+    GaugeMetric gaugeMetric = new GaugeMetric<>("test", this::getSeq);
-    private long getSeq() {
-        return ++x;
-    }
+    assertThat(gaugeMetric.getValue(), equalTo(1L));
+    assertThat(gaugeMetric.getValue(), equalTo(2L));
+  }
+  private long getSeq() {
+    return ++x;
+  }
 }
diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java
index ff6d8e0c49..885ce6a7cd 100644
--- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java
+++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
-
 package org.opensearch.sql.legacy.unittest.metrics;

 import static org.hamcrest.MatcherAssert.assertThat;
@@ -20,55 +19,53 @@
 public class MetricsTest {
-    @Test
-    public void registerMetric() {
-        Metrics.getInstance().clear();
-        Metrics.getInstance().registerMetric(new NumericMetric("test", new BasicCounter()));
-
-        assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1));
-    }
-
-    @Test
-    public void unRegisterMetric() {
-        Metrics.getInstance().clear();
-        Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter()));
-        Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter()));
-        assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(2));
-
-        Metrics.getInstance().unregisterMetric("test2");
-        assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1));
-    }
-
-    @Test
-    public void getMetric() {
-        Metrics.getInstance().clear();
-        Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter()));
-        Metric metric = Metrics.getInstance().getMetric("test1");
-
-        assertThat(metric, notNullValue());
-    }
-
-
-    @Test
-    public void getAllMetric() {
-
Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); - Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); - List list = Metrics.getInstance().getAllMetrics(); - - assertThat(list.size(), equalTo(2)); - } - - @Test - public void collectToJSON() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); - Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); - String res = Metrics.getInstance().collectToJSON(); - JSONObject jsonObject = new JSONObject(res); - - assertThat(jsonObject.getLong("test1"), equalTo(0L)); - assertThat(jsonObject.getInt("test2"), equalTo(0)); - } - + @Test + public void registerMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test", new BasicCounter())); + + assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1)); + } + + @Test + public void unRegisterMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); + assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(2)); + + Metrics.getInstance().unregisterMetric("test2"); + assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1)); + } + + @Test + public void getMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metric metric = Metrics.getInstance().getMetric("test1"); + + assertThat(metric, notNullValue()); + } + + @Test + public void getAllMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); + List list = Metrics.getInstance().getAllMetrics(); + + assertThat(list.size(), equalTo(2)); + } + + @Test + public void collectToJSON() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); + String res = Metrics.getInstance().collectToJSON(); + JSONObject jsonObject = new JSONObject(res); + + assertThat(jsonObject.getLong("test1"), equalTo(0L)); + assertThat(jsonObject.getInt("test2"), equalTo(0)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java index 5115757c9c..c33e768f43 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.parser; import static org.junit.Assert.assertEquals; @@ -19,36 +18,40 @@ public class FieldMakerTest { - private static final String ALIAS = "a"; - - private static final String TABLE_ALIAS = "t"; - - private FieldMaker fieldMaker; - - @Before - public void init() { - fieldMaker = new FieldMaker(); - } - - @Test - public void makeFieldAssign() throws SqlParseException { - final SQLIntegerExpr sqlExpr = new SQLIntegerExpr(10); - final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, 
ALIAS, TABLE_ALIAS); - - assertEquals("script", field.getName()); - assertEquals(ALIAS, field.getParams().get(0).value); - assertTrue(((String)field.getParams().get(1).value).matches("def assign_[0-9]+ = 10;return assign_[0-9]+;")); - assertEquals(ALIAS, field.getAlias()); - } - - @Test - public void makeFieldAssignDouble() throws SqlParseException { - final SQLNumberExpr sqlExpr = new SQLNumberExpr(10.0); - final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); - - assertEquals("script", field.getName()); - assertEquals(ALIAS, field.getParams().get(0).value); - assertTrue(((String)field.getParams().get(1).value).matches("def assign_[0-9]+ = 10.0;return assign_[0-9]+;")); - assertEquals(ALIAS, field.getAlias()); - } + private static final String ALIAS = "a"; + + private static final String TABLE_ALIAS = "t"; + + private FieldMaker fieldMaker; + + @Before + public void init() { + fieldMaker = new FieldMaker(); + } + + @Test + public void makeFieldAssign() throws SqlParseException { + final SQLIntegerExpr sqlExpr = new SQLIntegerExpr(10); + final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); + + assertEquals("script", field.getName()); + assertEquals(ALIAS, field.getParams().get(0).value); + assertTrue( + ((String) field.getParams().get(1).value) + .matches("def assign_[0-9]+ = 10;return assign_[0-9]+;")); + assertEquals(ALIAS, field.getAlias()); + } + + @Test + public void makeFieldAssignDouble() throws SqlParseException { + final SQLNumberExpr sqlExpr = new SQLNumberExpr(10.0); + final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); + + assertEquals("script", field.getName()); + assertEquals(ALIAS, field.getParams().get(0).value); + assertTrue( + ((String) field.getParams().get(1).value) + .matches("def assign_[0-9]+ = 10.0;return assign_[0-9]+;")); + assertEquals(ALIAS, field.getAlias()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java index 9b88336a85..ed57335980 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import static org.junit.Assert.assertEquals; @@ -14,101 +13,103 @@ public class ExistsSubQueryRewriterTest extends SubQueryRewriterTestBase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void nonCorrelatedExists() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE p IS NOT MISSING")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p)"))) - ); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - @Test - public void nonCorrelatedExistsWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE p IS NOT MISSING AND p.name LIKE 'security'")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security')"))) - ); - } + @Test + public void nonCorrelatedExists() { 
+ assertEquals( + sqlString( + expr("SELECT e.name FROM employee e, e.projects p WHERE p IS NOT MISSING")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p)" + )))); + } - @Test - public void nonCorrelatedExistsParentWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE p IS NOT MISSING AND e.name LIKE 'security'")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p) AND e.name LIKE 'security'"))) - ); - } + @Test + public void nonCorrelatedExistsWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE p IS NOT MISSING AND p.name LIKE 'security'")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p" + + " WHERE p.name LIKE 'security')")))); + } - @Test - public void nonCorrelatedNotExists() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE NOT (p IS NOT MISSING)")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p)"))) - ); - } + @Test + public void nonCorrelatedExistsParentWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE p IS NOT MISSING AND e.name LIKE 'security'")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p)" + + " AND e.name LIKE 'security'")))); + } - @Test - public void nonCorrelatedNotExistsWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE NOT (p IS NOT MISSING AND p.name LIKE 'security')")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security')"))) - ); - } + @Test + public void nonCorrelatedNotExists() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE NOT (p IS NOT MISSING)")), + sqlString( + rewrite( + expr( + "SELECT e.name " + + "FROM employee as e " + + "WHERE NOT EXISTS (SELECT * FROM e.projects as p)")))); + } - @Test - public void nonCorrelatedNotExistsParentWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE NOT (p IS NOT MISSING) AND e.name LIKE 'security'")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p) AND e.name LIKE 'security'"))) - ); - } + @Test + public void nonCorrelatedNotExistsWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE NOT (p IS NOT MISSING AND p.name LIKE 'security')")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE NOT EXISTS (SELECT * FROM e.projects as" + + " p WHERE p.name LIKE 'security')")))); + } - @Test - public void nonCorrelatedExistsAnd() { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("Unsupported subquery"); - rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p) AND EXISTS (SELECT * FROM e.comments as c)")); - } + @Test + public void nonCorrelatedNotExistsParentWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, 
e.projects p " + + "WHERE NOT (p IS NOT MISSING) AND e.name LIKE 'security'")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE NOT EXISTS (SELECT * FROM e.projects as" + + " p) AND e.name LIKE 'security'")))); + } + @Test + public void nonCorrelatedExistsAnd() { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("Unsupported subquery"); + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p) AND" + + " EXISTS (SELECT * FROM e.comments as c)")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java index e6bd42a273..bb33baae7d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import static org.junit.Assert.assertEquals; @@ -14,73 +13,62 @@ public class InSubqueryRewriterTest extends SubQueryRewriterTestBase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void nonCorrleatedIn() throws Exception { - assertEquals( - sqlString(expr( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "JOIN TbB as TbB_1 " + - "ON TbA_0.a = TbB_1.b " + - "WHERE TbB_1.b IS NOT NULL")), - sqlString(rewrite(expr( - "SELECT * FROM TbA " + - "WHERE a in (SELECT b FROM TbB)"))) - ); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - @Test - public void nonCorrleatedInWithWhere() throws Exception { - assertEquals( - sqlString(expr( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "JOIN TbB as TbB_1 " + - "ON TbA_0.a = TbB_1.b " + - "WHERE TbB_1.b IS NOT NULL AND TbB_1.b > 0")), - sqlString(rewrite(expr( - "SELECT * " + - "FROM TbA " + - "WHERE a in (SELECT b FROM TbB WHERE b > 0)"))) - ); - } + @Test + public void nonCorrleatedIn() throws Exception { + assertEquals( + sqlString( + expr( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "JOIN TbB as TbB_1 " + + "ON TbA_0.a = TbB_1.b " + + "WHERE TbB_1.b IS NOT NULL")), + sqlString(rewrite(expr("SELECT * FROM TbA " + "WHERE a in (SELECT b FROM TbB)")))); + } - @Test - public void nonCorrleatedInWithOuterWhere() throws Exception { - assertEquals( - sqlString(expr( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "JOIN TbB as TbB_1 " + - "ON TbA_0.a = TbB_1.b " + - "WHERE TbB_1.b IS NOT NULL AND TbA_0.a > 10")), - sqlString(rewrite(expr( - "SELECT * " + - "FROM TbA " + - "WHERE a in (SELECT b FROM TbB) AND a > 10"))) - ); - } + @Test + public void nonCorrleatedInWithWhere() throws Exception { + assertEquals( + sqlString( + expr( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "JOIN TbB as TbB_1 " + + "ON TbA_0.a = TbB_1.b " + + "WHERE TbB_1.b IS NOT NULL AND TbB_1.b > 0")), + sqlString( + rewrite( + expr("SELECT * " + "FROM TbA " + "WHERE a in (SELECT b FROM TbB WHERE b > 0)")))); + } + @Test + public void nonCorrleatedInWithOuterWhere() throws Exception { + assertEquals( + sqlString( + expr( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "JOIN TbB as TbB_1 " + + "ON TbA_0.a = TbB_1.b " + + "WHERE TbB_1.b IS NOT NULL AND TbA_0.a > 10")), + sqlString( + rewrite( + expr("SELECT * " + "FROM TbA " + "WHERE a in (SELECT b FROM TbB) AND a > 
10")))); + } - @Test - public void notInUnsupported() throws Exception { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("Unsupported subquery"); - rewrite(expr( - "SELECT * FROM TbA " + - "WHERE a not in (SELECT b FROM TbB)")); - } + @Test + public void notInUnsupported() throws Exception { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("Unsupported subquery"); + rewrite(expr("SELECT * FROM TbA WHERE a not in (SELECT b FROM TbB)")); + } - @Test - public void testMultipleSelectException() throws Exception { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("Unsupported subquery with multiple select [TbB_1.b1, TbB_1.b2]"); - rewrite(expr( - "SELECT * " + - "FROM TbA " + - "WHERE a in (SELECT b1, b2 FROM TbB) AND a > 10")); - } + @Test + public void testMultipleSelectException() throws Exception { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("Unsupported subquery with multiple select [TbB_1.b1, TbB_1.b2]"); + rewrite(expr("SELECT * " + "FROM TbA WHERE a in (SELECT b1, b2 FROM TbB) AND a > 10")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java index 34a915ac2b..8aae3996a0 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery.utils; import static org.junit.Assert.assertEquals; @@ -16,36 +15,34 @@ public class FindSubQueryTest { - @Test - public void hasInSubQuery() { - FindSubQuery findSubQuery = new FindSubQuery(); - - parse("SELECT * FROM TbA " + - "WHERE a in (SELECT b FROM TbB)").accept(findSubQuery); - assertTrue(findSubQuery.hasSubQuery()); - assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); - assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); - } - - @Test - public void hasExistSubQuery() { - FindSubQuery findSubQuery = new FindSubQuery(); - - parse("SELECT * FROM TbA " + - "WHERE EXISTS (SELECT * FROM TbB)").accept(findSubQuery); - assertTrue(findSubQuery.hasSubQuery()); - assertFalse(findSubQuery.getSqlExistsExprs().isEmpty()); - assertEquals(1, findSubQuery.getSqlExistsExprs().size()); - } - - @Test - public void stopVisitWhenFound() { - FindSubQuery findSubQuery = new FindSubQuery().continueVisitWhenFound(false); - - parse("SELECT * FROM TbA " + - "WHERE a in (SELECT b FROM TbB WHERE b2 in (SELECT c FROM Tbc))").accept(findSubQuery); - assertTrue(findSubQuery.hasSubQuery()); - assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); - assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); - } + @Test + public void hasInSubQuery() { + FindSubQuery findSubQuery = new FindSubQuery(); + + parse("SELECT * FROM TbA " + "WHERE a in (SELECT b FROM TbB)").accept(findSubQuery); + assertTrue(findSubQuery.hasSubQuery()); + assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); + assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); + } + + @Test + public void hasExistSubQuery() { + FindSubQuery findSubQuery = new FindSubQuery(); + + parse("SELECT * FROM TbA WHERE EXISTS (SELECT * FROM TbB)").accept(findSubQuery); + 
assertTrue(findSubQuery.hasSubQuery()); + assertFalse(findSubQuery.getSqlExistsExprs().isEmpty()); + assertEquals(1, findSubQuery.getSqlExistsExprs().size()); + } + + @Test + public void stopVisitWhenFound() { + FindSubQuery findSubQuery = new FindSubQuery().continueVisitWhenFound(false); + + parse("SELECT * FROM TbA WHERE a in (SELECT b FROM TbB WHERE b2 in (SELECT c FROM Tbc))") + .accept(findSubQuery); + assertTrue(findSubQuery.hasSubQuery()); + assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); + assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java index 3a7f074a0f..74f6411f73 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import java.lang.reflect.Field; @@ -13,42 +12,43 @@ /** * A matcher for private field value extraction along with matcher to assert its value. * - * @param Type of target (actual) object - * @param Type of field member (feature) extracted from target object by reflection + * @param Type of target (actual) object + * @param Type of field member (feature) extracted from target object by reflection */ public class HasFieldWithValue extends FeatureMatcher { - private final String fieldName; - - /** - * Construct a matcher. Reordered the argument list. - * - * @param name Identifying text for mismatch message - * @param desc Descriptive text to use in describeTo - * @param matcher The matcher to apply to the feature - */ - private HasFieldWithValue(String name, String desc, Matcher matcher) { - super(matcher, desc, name); - this.fieldName = name; - } - - public static HasFieldWithValue hasFieldWithValue(String name, String desc, Matcher matcher) { - return new HasFieldWithValue<>(name, desc, matcher); - } - - @Override - protected U featureValueOf(T targetObj) { - return getFieldValue(targetObj, fieldName); - } - - @SuppressWarnings("unchecked") - private U getFieldValue(Object obj, String fieldName) { - try { - Field field = obj.getClass().getDeclaredField(fieldName); - field.setAccessible(true); - return (U) field.get(obj); - } catch (NoSuchFieldException | IllegalAccessException e) { - throw new IllegalArgumentException(e); - } + private final String fieldName; + + /** + * Construct a matcher. Reordered the argument list. 
+ * + * @param name Identifying text for mismatch message + * @param desc Descriptive text to use in describeTo + * @param matcher The matcher to apply to the feature + */ + private HasFieldWithValue(String name, String desc, Matcher matcher) { + super(matcher, desc, name); + this.fieldName = name; + } + + public static HasFieldWithValue hasFieldWithValue( + String name, String desc, Matcher matcher) { + return new HasFieldWithValue<>(name, desc, matcher); + } + + @Override + protected U featureValueOf(T targetObj) { + return getFieldValue(targetObj, fieldName); + } + + @SuppressWarnings("unchecked") + private U getFieldValue(Object obj, String fieldName) { + try { + Field field = obj.getClass().getDeclaredField(fieldName); + field.setAccessible(true); + return (U) field.get(obj); + } catch (NoSuchFieldException | IllegalAccessException e) { + throw new IllegalArgumentException(e); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java index 84f19de58b..0e5f699092 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java @@ -3,9 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; @@ -17,7 +17,6 @@ import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItems; import static org.junit.Assert.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; import com.google.common.base.Strings; import java.util.ArrayList; @@ -36,260 +35,266 @@ public class MatcherUtils { - /** - * Assert field value in object by a custom matcher and getter to access the field. - * - * @param name description - * @param subMatcher sub-matcher for field - * @param getter getter function to access the field - * @param type of outer object - * @param type of inner field - * @return matcher - */ - public static FeatureMatcher featureValueOf(String name, - Matcher subMatcher, - Function getter) { - return new FeatureMatcher(subMatcher, name, name) { - @Override - protected U featureValueOf(T actual) { - return getter.apply(actual); - } - }; - } - - @SafeVarargs - public static Matcher hits(Matcher... hitMatchers) { - if (hitMatchers.length == 0) { - return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); - } - return featureValueOf("SearchHits", arrayContainingInAnyOrder(hitMatchers), SearchHits::getHits); + /** + * Assert field value in object by a custom matcher and getter to access the field. + * + * @param name description + * @param subMatcher sub-matcher for field + * @param getter getter function to access the field + * @param type of outer object + * @param type of inner field + * @return matcher + */ + public static FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { + return new FeatureMatcher(subMatcher, name, name) { + @Override + protected U featureValueOf(T actual) { + return getter.apply(actual); + } + }; + } + + @SafeVarargs + public static Matcher hits(Matcher... hitMatchers) { + if (hitMatchers.length == 0) { + return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); } - - @SafeVarargs - public static Matcher hitsInOrder(Matcher... 
hitMatchers) { - if (hitMatchers.length == 0) { - return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); - } - return featureValueOf("SearchHits", arrayContaining(hitMatchers), SearchHits::getHits); - } - - @SuppressWarnings("unchecked") - public static Matcher hit(Matcher>... entryMatchers) { - return featureValueOf("SearchHit", allOf(entryMatchers), SearchHit::getSourceAsMap); - } - - @SuppressWarnings("unchecked") - public static Matcher> kv(String key, Object value) { - // Use raw type to avoid generic type problem from Matcher> to Matcher - return (Matcher) hasEntry(key, value); + return featureValueOf( + "SearchHits", arrayContainingInAnyOrder(hitMatchers), SearchHits::getHits); + } + + @SafeVarargs + public static Matcher hitsInOrder(Matcher... hitMatchers) { + if (hitMatchers.length == 0) { + return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); } - - public static Matcher hitAny(String query, Matcher... matcher) { - return featureValueOf("SearchHits", hasItems(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query(query)); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; + return featureValueOf("SearchHits", arrayContaining(hitMatchers), SearchHits::getHits); + } + + @SuppressWarnings("unchecked") + public static Matcher hit(Matcher>... entryMatchers) { + return featureValueOf("SearchHit", allOf(entryMatchers), SearchHit::getSourceAsMap); + } + + @SuppressWarnings("unchecked") + public static Matcher> kv(String key, Object value) { + // Use raw type to avoid generic type problem from Matcher> to Matcher + return (Matcher) hasEntry(key, value); + } + + public static Matcher hitAny(String query, Matcher... matcher) { + return featureValueOf( + "SearchHits", + hasItems(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query(query)); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; }); - } - - public static Matcher hitAny(Matcher... matcher) { - return hitAny("/hits/hits", matcher); - } - - public static Matcher hitAll(Matcher... matcher) { - return featureValueOf("SearchHits", containsInAnyOrder(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query("/hits/hits")); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; + } + + public static Matcher hitAny(Matcher... matcher) { + return hitAny("/hits/hits", matcher); + } + + public static Matcher hitAll(Matcher... matcher) { + return featureValueOf( + "SearchHits", + containsInAnyOrder(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query("/hits/hits")); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; }); + } + + public static Matcher kvString(String key, Matcher matcher) { + return featureValueOf("Json Match", matcher, actual -> (String) actual.query(key)); + } + + public static Matcher kvDouble(String key, Matcher matcher) { + return featureValueOf("Json Match", matcher, actual -> (Double) actual.query(key)); + } + + public static Matcher kvInt(String key, Matcher matcher) { + return featureValueOf("Json Match", matcher, actual -> (Integer) actual.query(key)); + } + + @SafeVarargs + public static void verifySchema(JSONObject response, Matcher... 
matchers) { + verify(response.getJSONArray("schema"), matchers); + } + + @SafeVarargs + public static void verifyDataRows(JSONObject response, Matcher... matchers) { + verify(response.getJSONArray("datarows"), matchers); + } + + @SafeVarargs + public static void verifyColumn(JSONObject response, Matcher... matchers) { + verify(response.getJSONArray("schema"), matchers); + } + + @SafeVarargs + public static void verifyOrder(JSONObject response, Matcher... matchers) { + verifyOrder(response.getJSONArray("datarows"), matchers); + } + + @SafeVarargs + @SuppressWarnings("unchecked") + public static void verifyDataRowsInOrder(JSONObject response, Matcher... matchers) { + verifyInOrder(response.getJSONArray("datarows"), matchers); + } + + @SuppressWarnings("unchecked") + public static void verify(JSONArray array, Matcher... matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + assertEquals(matchers.length, objects.size()); + assertThat(objects, containsInAnyOrder(matchers)); + } + + @SafeVarargs + @SuppressWarnings("unchecked") + public static void verifyInOrder(JSONArray array, Matcher... matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + assertEquals(matchers.length, objects.size()); + assertThat(objects, contains(matchers)); + } + + @SuppressWarnings("unchecked") + public static void verifySome(JSONArray array, Matcher... matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + + assertThat(matchers.length, greaterThan(0)); + for (Matcher matcher : matchers) { + assertThat(objects, hasItems(matcher)); } - - public static Matcher kvString(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> (String) actual.query(key)); - } - - public static Matcher kvDouble(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> (Double) actual.query(key)); + } + + @SafeVarargs + public static void verifyOrder(JSONArray array, Matcher... matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + assertEquals(matchers.length, objects.size()); + assertThat(objects, containsInRelativeOrder(matchers)); + } + + public static TypeSafeMatcher schema( + String expectedName, String expectedAlias, String expectedType) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText( + String.format( + "(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); + } + + @Override + protected boolean matchesSafely(JSONObject jsonObject) { + String actualName = (String) jsonObject.query("/name"); + String actualAlias = (String) jsonObject.query("/alias"); + String actualType = (String) jsonObject.query("/type"); + return expectedName.equals(actualName) + && (Strings.isNullOrEmpty(actualAlias) && Strings.isNullOrEmpty(expectedAlias) + || expectedAlias.equals(actualAlias)) + && expectedType.equals(actualType); + } + }; + } + + public static TypeSafeMatcher rows(Object... 
expectedObjects) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText(String.join(",", Arrays.asList(expectedObjects).toString())); + } + + @Override + protected boolean matchesSafely(JSONArray array) { + List actualObjects = new ArrayList<>(); + array.iterator().forEachRemaining(actualObjects::add); + return Arrays.asList(expectedObjects).equals(actualObjects); + } + }; + } + + public static TypeSafeMatcher columnPattern(String regex) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(JSONObject jsonObject) { + return ((String) jsonObject.query("/name")).matches(regex); + } + + @Override + public void describeTo(Description description) { + description.appendText(String.format("(column_pattern=%s)", regex)); + } + }; + } + + public static TypeSafeMatcher columnName(String name) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(JSONObject jsonObject) { + return jsonObject.query("/name").equals(name); + } + + @Override + public void describeTo(Description description) { + description.appendText(String.format("(name=%s)", name)); + } + }; + } + + /** Tests if a string is equal to another string, ignore the case and whitespace. */ + public static class IsEqualIgnoreCaseAndWhiteSpace extends TypeSafeMatcher { + private final String string; + + public IsEqualIgnoreCaseAndWhiteSpace(String string) { + if (string == null) { + throw new IllegalArgumentException("Non-null value required"); + } + this.string = string; } - public static Matcher kvInt(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> (Integer) actual.query(key)); + @Override + public boolean matchesSafely(String item) { + return ignoreCase(ignoreSpaces(string)).equals(ignoreCase(ignoreSpaces(item))); } - @SafeVarargs - public static void verifySchema(JSONObject response, Matcher... matchers) { - verify(response.getJSONArray("schema"), matchers); + @Override + public void describeMismatchSafely(String item, Description mismatchDescription) { + mismatchDescription.appendText("was ").appendValue(item); } - @SafeVarargs - public static void verifyDataRows(JSONObject response, Matcher... matchers) { - verify(response.getJSONArray("datarows"), matchers); + @Override + public void describeTo(Description description) { + description + .appendText("a string equal to ") + .appendValue(string) + .appendText(" ignore case and white space"); } - @SafeVarargs - public static void verifyColumn(JSONObject response, Matcher... matchers) { - verify(response.getJSONArray("schema"), matchers); + public String ignoreSpaces(String toBeStripped) { + return toBeStripped.replaceAll("\\s+", "").trim(); } - @SafeVarargs - public static void verifyOrder(JSONObject response, Matcher... matchers) { - verifyOrder(response.getJSONArray("datarows"), matchers); + public String ignoreCase(String toBeLower) { + return toBeLower.toLowerCase(); } - @SafeVarargs - @SuppressWarnings("unchecked") - public static void verifyDataRowsInOrder(JSONObject response, Matcher... matchers) { - verifyInOrder(response.getJSONArray("datarows"), matchers); - } - - @SuppressWarnings("unchecked") - public static void verify(JSONArray array, Matcher... 
matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - assertEquals(matchers.length, objects.size()); - assertThat(objects, containsInAnyOrder(matchers)); - } - - @SafeVarargs - @SuppressWarnings("unchecked") - public static void verifyInOrder(JSONArray array, Matcher... matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - assertEquals(matchers.length, objects.size()); - assertThat(objects, contains(matchers)); - } - - @SuppressWarnings("unchecked") - public static void verifySome(JSONArray array, Matcher... matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - - assertThat(matchers.length, greaterThan(0)); - for (Matcher matcher : matchers) { - assertThat(objects, hasItems(matcher)); - } - } - - @SafeVarargs - public static void verifyOrder(JSONArray array, Matcher... matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - assertEquals(matchers.length, objects.size()); - assertThat(objects, containsInRelativeOrder(matchers)); - } - - public static TypeSafeMatcher schema(String expectedName, String expectedAlias, String expectedType) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText( - String.format("(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); - } - - @Override - protected boolean matchesSafely(JSONObject jsonObject) { - String actualName = (String) jsonObject.query("/name"); - String actualAlias = (String) jsonObject.query("/alias"); - String actualType = (String) jsonObject.query("/type"); - return expectedName.equals(actualName) && - (Strings.isNullOrEmpty(actualAlias) && Strings.isNullOrEmpty(expectedAlias) || - expectedAlias.equals(actualAlias)) && - expectedType.equals(actualType); - } - }; - } - - public static TypeSafeMatcher rows(Object... expectedObjects) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText(String.join(",", Arrays.asList(expectedObjects).toString())); - } - - @Override - protected boolean matchesSafely(JSONArray array) { - List actualObjects = new ArrayList<>(); - array.iterator().forEachRemaining(actualObjects::add); - return Arrays.asList(expectedObjects).equals(actualObjects); - } - }; - } - - public static TypeSafeMatcher columnPattern(String regex) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(JSONObject jsonObject) { - return ((String)jsonObject.query("/name")).matches(regex); - } - - @Override - public void describeTo(Description description) { - description.appendText(String.format("(column_pattern=%s)", regex)); - } - }; - } - - public static TypeSafeMatcher columnName(String name) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(JSONObject jsonObject) { - return jsonObject.query("/name").equals(name); - } - - @Override - public void describeTo(Description description) { - description.appendText(String.format("(name=%s)", name)); - } - }; - } - - - /** - * Tests if a string is equal to another string, ignore the case and whitespace. 
- */ - public static class IsEqualIgnoreCaseAndWhiteSpace extends TypeSafeMatcher { - private final String string; - - public IsEqualIgnoreCaseAndWhiteSpace(String string) { - if (string == null) { - throw new IllegalArgumentException("Non-null value required"); - } - this.string = string; - } - - @Override - public boolean matchesSafely(String item) { - return ignoreCase(ignoreSpaces(string)).equals(ignoreCase(ignoreSpaces(item))); - } - - @Override - public void describeMismatchSafely(String item, Description mismatchDescription) { - mismatchDescription.appendText("was ").appendValue(item); - } - - @Override - public void describeTo(Description description) { - description.appendText("a string equal to ") - .appendValue(string) - .appendText(" ignore case and white space"); - } - - public String ignoreSpaces(String toBeStripped) { - return toBeStripped.replaceAll("\\s+", "").trim(); - } - - public String ignoreCase(String toBeLower) { - return toBeLower.toLowerCase(); - } - - public static Matcher equalToIgnoreCaseAndWhiteSpace(String expectedString) { - return new IsEqualIgnoreCaseAndWhiteSpace(expectedString); - } + public static Matcher equalToIgnoreCaseAndWhiteSpace(String expectedString) { + return new IsEqualIgnoreCaseAndWhiteSpace(expectedString); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java index ff15cd698c..58234d73b7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import static org.mockito.Matchers.any; @@ -24,170 +23,179 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Test Utility which provide the cluster have 2 indices. - */ +/** Test Utility which provide the cluster have 2 indices. 
*/ public class MultipleIndexClusterUtils { - public final static String INDEX_ACCOUNT_1 = "account1"; - public final static String INDEX_ACCOUNT_2 = "account2"; - public final static String INDEX_ACCOUNT_ALL = "account*"; + public static final String INDEX_ACCOUNT_1 = "account1"; + public static final String INDEX_ACCOUNT_2 = "account2"; + public static final String INDEX_ACCOUNT_ALL = "account*"; - public static String INDEX_ACCOUNT_1_MAPPING = "{\n" + - " \"field_mappings\": {\n" + - " \"mappings\": {\n" + - " \"account1\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"started_year\": {\n" + - " \"type\": \"int\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 1,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\"\n" + - " }\n" + - "}"; + public static String INDEX_ACCOUNT_1_MAPPING = + "{\n" + + " \"field_mappings\": {\n" + + " \"mappings\": {\n" + + " \"account1\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"int\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 1,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\",\n" + + " \"aliases_version\": \"1\"\n" + + " }\n" + + "}"; - /** - * The difference with account1. - * 1. missing address. - * 2. age has different type. - * 3. projects.started_year has different type. 
- */ - public static String INDEX_ACCOUNT_2_MAPPING = "{\n" + - " \"field_mappings\": {\n" + - " \"mappings\": {\n" + - " \"account2\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"started_year\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 1,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\"\n" + - " }\n" + - "}"; + /** + * The difference with account1. 1. missing address. 2. age has different type. 3. + * projects.started_year has different type. + */ + public static String INDEX_ACCOUNT_2_MAPPING = + "{\n" + + " \"field_mappings\": {\n" + + " \"mappings\": {\n" + + " \"account2\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 1,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\",\n" + + " \"aliases_version\": \"1\"\n" + + " }\n" + + "}"; - public static void mockMultipleIndexEnv() { - mockLocalClusterState( - Map.of(INDEX_ACCOUNT_1, buildIndexMapping(INDEX_ACCOUNT_1, INDEX_ACCOUNT_1_MAPPING), - INDEX_ACCOUNT_2, buildIndexMapping(INDEX_ACCOUNT_2, INDEX_ACCOUNT_2_MAPPING), - INDEX_ACCOUNT_ALL, buildIndexMapping(Map.of(INDEX_ACCOUNT_1, INDEX_ACCOUNT_1_MAPPING, - INDEX_ACCOUNT_2, INDEX_ACCOUNT_2_MAPPING)))); - } + public static void mockMultipleIndexEnv() { + mockLocalClusterState( + Map.of( + INDEX_ACCOUNT_1, + buildIndexMapping(INDEX_ACCOUNT_1, INDEX_ACCOUNT_1_MAPPING), + INDEX_ACCOUNT_2, + buildIndexMapping(INDEX_ACCOUNT_2, INDEX_ACCOUNT_2_MAPPING), + INDEX_ACCOUNT_ALL, + buildIndexMapping( + Map.of( + INDEX_ACCOUNT_1, + INDEX_ACCOUNT_1_MAPPING, + INDEX_ACCOUNT_2, + INDEX_ACCOUNT_2_MAPPING)))); + } - public static void mockLocalClusterState(Map> indexMapping) { - LocalClusterState.state().setClusterService(mockClusterService(indexMapping)); - LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); - LocalClusterState.state().setPluginSettings(mockPluginSettings()); - } + public static void mockLocalClusterState(Map> indexMapping) { + LocalClusterState.state().setClusterService(mockClusterService(indexMapping)); + LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); + LocalClusterState.state().setPluginSettings(mockPluginSettings()); + } + public static ClusterService 
mockClusterService( + Map> indexMapping) { + ClusterService mockService = mock(ClusterService.class); + ClusterState mockState = mock(ClusterState.class); + Metadata mockMetaData = mock(Metadata.class); - public static ClusterService mockClusterService(Map> - indexMapping) { - ClusterService mockService = mock(ClusterService.class); - ClusterState mockState = mock(ClusterState.class); - Metadata mockMetaData = mock(Metadata.class); - - when(mockService.state()).thenReturn(mockState); - when(mockState.metadata()).thenReturn(mockMetaData); - try { - for (var entry : indexMapping.entrySet()) { - when(mockMetaData.findMappings(eq(new String[]{entry.getKey()}), any())) - .thenReturn(entry.getValue()); - } - } catch (IOException e) { - throw new IllegalStateException(e); - } - return mockService; + when(mockService.state()).thenReturn(mockState); + when(mockState.metadata()).thenReturn(mockMetaData); + try { + for (var entry : indexMapping.entrySet()) { + when(mockMetaData.findMappings(eq(new String[] {entry.getKey()}), any())) + .thenReturn(entry.getValue()); + } + } catch (IOException e) { + throw new IllegalStateException(e); } + return mockService; + } - private static Map buildIndexMapping(Map indexMapping) { - return indexMapping.entrySet().stream().collect(Collectors.toUnmodifiableMap( - Map.Entry::getKey, e -> { - try { - return IndexMetadata.fromXContent(createParser(e.getValue())).mapping(); - } catch (IOException ex) { - throw new IllegalStateException(ex); - } - })); - - } + private static Map buildIndexMapping(Map indexMapping) { + return indexMapping.entrySet().stream() + .collect( + Collectors.toUnmodifiableMap( + Map.Entry::getKey, + e -> { + try { + return IndexMetadata.fromXContent(createParser(e.getValue())).mapping(); + } catch (IOException ex) { + throw new IllegalStateException(ex); + } + })); + } - private static Map buildIndexMapping(String index, String mapping) { - try { - return Map.of(index, IndexMetadata.fromXContent(createParser(mapping)).mapping()); - } catch (IOException e) { - throw new IllegalStateException(e); - } + private static Map buildIndexMapping(String index, String mapping) { + try { + return Map.of(index, IndexMetadata.fromXContent(createParser(mapping)).mapping()); + } catch (IOException e) { + throw new IllegalStateException(e); } + } }
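
For context when reviewing the reformatting above, the sketch below shows how the MatcherUtils helpers touched by this patch are typically exercised from a test. It is illustrative only and not part of the patch: the test class name, the inline JSON payload, and the "a" alias value are hypothetical, while the helper signatures used (verifySchema, verifyDataRows, schema, rows) are the ones visible in the MatcherUtils diff above.

// Minimal usage sketch (not part of this patch). Assumes a JDBC-style response object
// with "schema" and "datarows" arrays, which is what verifySchema/verifyDataRows read.
import static org.opensearch.sql.legacy.util.MatcherUtils.rows;
import static org.opensearch.sql.legacy.util.MatcherUtils.schema;
import static org.opensearch.sql.legacy.util.MatcherUtils.verifyDataRows;
import static org.opensearch.sql.legacy.util.MatcherUtils.verifySchema;

import org.json.JSONObject;
import org.junit.Test;

public class MatcherUtilsUsageSketch {

  @Test
  public void verifiesSchemaAndDataRowsOfAQueryResponse() {
    // Hypothetical response payload in the format the helpers expect.
    JSONObject response =
        new JSONObject(
            "{\"schema\": [{\"name\": \"age\", \"alias\": \"a\", \"type\": \"integer\"}],"
                + " \"datarows\": [[30], [25]]}");

    // schema(name, alias, type) builds a TypeSafeMatcher applied to each "schema" entry.
    verifySchema(response, schema("age", "a", "integer"));

    // rows(...) matches a single datarow by value; verifyDataRows accepts the rows
    // in any order but requires the count to match exactly.
    verifyDataRows(response, rows(30), rows(25));
  }
}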