diff --git a/build.gradle b/build.gradle index 4f5813fb22..2bdc4865bb 100644 --- a/build.gradle +++ b/build.gradle @@ -84,9 +84,16 @@ repositories { spotless { java { target fileTree('.') { - include 'common/**/*.java', - 'datasources/**/*.java', - 'core/**/*.java' + include 'datasources/**/*.java', + 'core/**/*.java', + 'protocol/**/*.java', + 'prometheus/**/*.java', + 'sql/**/*.java', + 'common/**/*.java', + 'spark/**/*.java', + 'plugin/**/*.java', + 'ppl/**/*.java', + 'integ-test/**/*java' exclude '**/build/**', '**/build-*/**' } importOrder() diff --git a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java index ad3713ec9a..d5e8b93b13 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java @@ -161,7 +161,7 @@ public LogicalPlan visitRelation(Relation node, AnalysisContext context) { table.getFieldTypes().forEach((k, v) -> curEnv.define(new Symbol(Namespace.FIELD_NAME, k), v)); table .getReservedFieldTypes() - .forEach((k, v) -> curEnv.addReservedWord(new Symbol(Namespace.FIELD_NAME, k), v)); + .forEach((k, v) -> curEnv.define(new Symbol(Namespace.HIDDEN_FIELD_NAME, k), v)); // Put index name or its alias in index namespace on type environment so qualifier // can be removed when analyzing qualified name. The value (expr type) here doesn't matter. @@ -215,7 +215,7 @@ public LogicalPlan visitTableFunction(TableFunction node, AnalysisContext contex table.getFieldTypes().forEach((k, v) -> curEnv.define(new Symbol(Namespace.FIELD_NAME, k), v)); table .getReservedFieldTypes() - .forEach((k, v) -> curEnv.addReservedWord(new Symbol(Namespace.FIELD_NAME, k), v)); + .forEach((k, v) -> curEnv.define(new Symbol(Namespace.HIDDEN_FIELD_NAME, k), v)); curEnv.define( new Symbol( Namespace.INDEX_NAME, dataSourceSchemaIdentifierNameResolver.getIdentifierName()), diff --git a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java index 8e586f68ff..5a8d6fe976 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java @@ -378,7 +378,7 @@ public Expression visitQualifiedName(QualifiedName node, AnalysisContext context typeEnv != null; typeEnv = typeEnv.getParent()) { Optional exprType = - typeEnv.getReservedSymbolTable().lookup(new Symbol(Namespace.FIELD_NAME, part)); + Optional.ofNullable(typeEnv.lookupAllFields(Namespace.HIDDEN_FIELD_NAME).get(part)); if (exprType.isPresent()) { return visitMetadata( qualifierAnalyzer.unqualified(node), (ExprCoreType) exprType.get(), context); diff --git a/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java b/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java index 8baab64810..18693a63e6 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java +++ b/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java @@ -25,8 +25,6 @@ public class TypeEnvironment implements Environment { @Getter private final TypeEnvironment parent; private final SymbolTable symbolTable; - @Getter private final SymbolTable reservedSymbolTable; - /** * Constructor with empty symbol tables. 
* @@ -35,7 +33,6 @@ public class TypeEnvironment implements Environment { public TypeEnvironment(TypeEnvironment parent) { this.parent = parent; this.symbolTable = new SymbolTable(); - this.reservedSymbolTable = new SymbolTable(); } /** @@ -47,7 +44,6 @@ public TypeEnvironment(TypeEnvironment parent) { public TypeEnvironment(TypeEnvironment parent, SymbolTable symbolTable) { this.parent = parent; this.symbolTable = symbolTable; - this.reservedSymbolTable = new SymbolTable(); } /** @@ -123,8 +119,4 @@ public void remove(ReferenceExpression ref) { public void clearAllFields() { lookupAllFields(FIELD_NAME).keySet().forEach(v -> remove(new Symbol(Namespace.FIELD_NAME, v))); } - - public void addReservedWord(Symbol symbol, ExprType type) { - reservedSymbolTable.store(symbol, type); - } } diff --git a/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java b/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java index 8211207b2e..e8a7454014 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java +++ b/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java @@ -9,6 +9,7 @@ public enum Namespace { INDEX_NAME("Index"), FIELD_NAME("Field"), + HIDDEN_FIELD_NAME("HiddenField"), FUNCTION_NAME("Function"); private final String name; diff --git a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java index 9d30ebeaab..b27b8348e2 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java @@ -216,13 +216,13 @@ public void qualified_name_with_qualifier() { public void qualified_name_with_reserved_symbol() { analysisContext.push(); - analysisContext.peek().addReservedWord(new Symbol(Namespace.FIELD_NAME, "_reserved"), STRING); - analysisContext.peek().addReservedWord(new Symbol(Namespace.FIELD_NAME, "_priority"), FLOAT); + analysisContext.peek().define(new Symbol(Namespace.HIDDEN_FIELD_NAME, "_reserved"), STRING); + analysisContext.peek().define(new Symbol(Namespace.HIDDEN_FIELD_NAME, "_priority"), FLOAT); analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); assertAnalyzeEqual(DSL.ref("_priority", FLOAT), qualifiedName("_priority")); assertAnalyzeEqual(DSL.ref("_reserved", STRING), qualifiedName("index_alias", "_reserved")); - // reserved fields take priority over symbol table + // cannot replace an existing field type analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "_reserved"), LONG); assertAnalyzeEqual(DSL.ref("_reserved", STRING), qualifiedName("index_alias", "_reserved")); diff --git a/integ-test/build.gradle b/integ-test/build.gradle index 5b9c113012..508d28b825 100644 --- a/integ-test/build.gradle +++ b/integ-test/build.gradle @@ -56,6 +56,11 @@ repositories { } } +// Being ignored as a temporary measure before being removed in favour of +// spotless https://github.com/opensearch-project/sql/issues/1101 +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + ext { projectSubstitutions = [:] licenseFile = rootProject.file('LICENSE.TXT') diff --git a/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java index 799dadcd2d..8c6c5d6710 100644 --- a/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.bwc; - import static org.opensearch.sql.legacy.TestUtils.createIndexByRestClient; import static org.opensearch.sql.legacy.TestUtils.isIndexExist; import static org.opensearch.sql.legacy.TestUtils.loadDataByRestClient; @@ -37,177 +35,189 @@ public class SQLBackwardsCompatibilityIT extends SQLIntegTestCase { - private static final ClusterType CLUSTER_TYPE = ClusterType.parse(System.getProperty("tests.rest.bwcsuite")); - private static final String CLUSTER_NAME = System.getProperty("tests.clustername"); - - @Override - protected final boolean preserveIndicesUponCompletion() { - return true; - } - - @Override - protected final boolean preserveReposUponCompletion() { - return true; - } - - @Override - protected boolean preserveTemplatesUponCompletion() { - return true; - } - - @Override - protected final Settings restClientSettings() { - return Settings - .builder() - .put(super.restClientSettings()) - // increase the timeout here to 90 seconds to handle long waits for a green - // cluster health. the waits for green need to be longer than a minute to - // account for delayed shards - .put(OpenSearchRestTestCase.CLIENT_SOCKET_TIMEOUT, "90s") - .build(); + private static final ClusterType CLUSTER_TYPE = + ClusterType.parse(System.getProperty("tests.rest.bwcsuite")); + private static final String CLUSTER_NAME = System.getProperty("tests.clustername"); + + @Override + protected final boolean preserveIndicesUponCompletion() { + return true; + } + + @Override + protected final boolean preserveReposUponCompletion() { + return true; + } + + @Override + protected boolean preserveTemplatesUponCompletion() { + return true; + } + + @Override + protected final Settings restClientSettings() { + return Settings.builder() + .put(super.restClientSettings()) + // increase the timeout here to 90 seconds to handle long waits for a green + // cluster health. 
the waits for green need to be longer than a minute to + // account for delayed shards + .put(OpenSearchRestTestCase.CLIENT_SOCKET_TIMEOUT, "90s") + .build(); + } + + private enum ClusterType { + OLD, + MIXED, + UPGRADED; + + public static ClusterType parse(String value) { + switch (value) { + case "old_cluster": + return OLD; + case "mixed_cluster": + return MIXED; + case "upgraded_cluster": + return UPGRADED; + default: + throw new AssertionError("unknown cluster type: " + value); + } } - - private enum ClusterType { - OLD, - MIXED, - UPGRADED; - - public static ClusterType parse(String value) { - switch (value) { - case "old_cluster": - return OLD; - case "mixed_cluster": - return MIXED; - case "upgraded_cluster": - return UPGRADED; - default: - throw new AssertionError("unknown cluster type: " + value); - } - } + } + + @SuppressWarnings("unchecked") + public void testBackwardsCompatibility() throws Exception { + String uri = getUri(); + Map> responseMap = + (Map>) getAsMap(uri).get("nodes"); + for (Map response : responseMap.values()) { + List> plugins = (List>) response.get("plugins"); + Set pluginNames = + plugins.stream().map(map -> map.get("name")).collect(Collectors.toSet()); + switch (CLUSTER_TYPE) { + case OLD: + Assert.assertTrue(pluginNames.contains("opensearch-sql")); + updateLegacySQLSettings(); + loadIndex(Index.ACCOUNT); + verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); + break; + case MIXED: + Assert.assertTrue(pluginNames.contains("opensearch-sql")); + verifySQLSettings(); + verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); + break; + case UPGRADED: + Assert.assertTrue(pluginNames.contains("opensearch-sql")); + verifySQLSettings(); + verifySQLQueries(QUERY_API_ENDPOINT); + break; + } + break; } - - @SuppressWarnings("unchecked") - public void testBackwardsCompatibility() throws Exception { - String uri = getUri(); - Map> responseMap = (Map>) getAsMap(uri).get("nodes"); - for (Map response : responseMap.values()) { - List> plugins = (List>) response.get("plugins"); - Set pluginNames = plugins.stream().map(map -> map.get("name")).collect(Collectors.toSet()); - switch (CLUSTER_TYPE) { - case OLD: - Assert.assertTrue(pluginNames.contains("opensearch-sql")); - updateLegacySQLSettings(); - loadIndex(Index.ACCOUNT); - verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); - break; - case MIXED: - Assert.assertTrue(pluginNames.contains("opensearch-sql")); - verifySQLSettings(); - verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); - break; - case UPGRADED: - Assert.assertTrue(pluginNames.contains("opensearch-sql")); - verifySQLSettings(); - verifySQLQueries(QUERY_API_ENDPOINT); - break; - } - break; + } + + private String getUri() { + switch (CLUSTER_TYPE) { + case OLD: + return "_nodes/" + CLUSTER_NAME + "-0/plugins"; + case MIXED: + String round = System.getProperty("tests.rest.bwcsuite_round"); + if (round.equals("second")) { + return "_nodes/" + CLUSTER_NAME + "-1/plugins"; + } else if (round.equals("third")) { + return "_nodes/" + CLUSTER_NAME + "-2/plugins"; + } else { + return "_nodes/" + CLUSTER_NAME + "-0/plugins"; } + case UPGRADED: + return "_nodes/plugins"; + default: + throw new AssertionError("unknown cluster type: " + CLUSTER_TYPE); } - - private String getUri() { - switch (CLUSTER_TYPE) { - case OLD: - return "_nodes/" + CLUSTER_NAME + "-0/plugins"; - case MIXED: - String round = System.getProperty("tests.rest.bwcsuite_round"); - if (round.equals("second")) { - return "_nodes/" + CLUSTER_NAME + "-1/plugins"; - } else if (round.equals("third")) { - return "_nodes/" + CLUSTER_NAME + 
"-2/plugins"; - } else { - return "_nodes/" + CLUSTER_NAME + "-0/plugins"; - } - case UPGRADED: - return "_nodes/plugins"; - default: - throw new AssertionError("unknown cluster type: " + CLUSTER_TYPE); - } + } + + private void updateLegacySQLSettings() throws IOException { + Request request = new Request("PUT", LEGACY_SQL_SETTINGS_API_ENDPOINT); + request.setJsonEntity( + String.format( + Locale.ROOT, + "{\n" + " \"persistent\" : {\n \"%s\" : \"%s\"\n }\n}", + "opendistro.sql.cursor.keep_alive", + "7m")); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + JSONObject jsonObject = new JSONObject(getResponseBody(response)); + Assert.assertTrue((boolean) jsonObject.get("acknowledged")); + } + + private void verifySQLSettings() throws IOException { + Request request = new Request("GET", "_cluster/settings?flat_settings"); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + JSONObject jsonObject = new JSONObject(getResponseBody(response)); + Assert.assertEquals( + "{\"transient\":{},\"persistent\":{\"opendistro.sql.cursor.keep_alive\":\"7m\"}}", + jsonObject.toString()); + } + + private void verifySQLQueries(String endpoint) throws IOException { + JSONObject filterResponse = + executeSQLQuery( + endpoint, + "SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); + verifySchema(filterResponse, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); + verifyDataRows(filterResponse, rows(238)); + + JSONObject aggResponse = + executeSQLQuery( + endpoint, "SELECT COUNT(DISTINCT age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); + verifySchema(aggResponse, schema("COUNT(DISTINCT age)", null, "integer")); + verifyDataRows(aggResponse, rows(21)); + + JSONObject groupByResponse = + executeSQLQuery( + endpoint, + "select a.gender from " + + TestsConstants.TEST_INDEX_ACCOUNT + + " a group by a.gender having count(*) > 0"); + verifySchema(groupByResponse, schema("gender", null, "text")); + Assert.assertEquals("[[\"F\"],[\"M\"]]", groupByResponse.getJSONArray("datarows").toString()); + } + + private JSONObject executeSQLQuery(String endpoint, String query) throws IOException { + Request request = new Request("POST", endpoint); + request.setJsonEntity(String.format(Locale.ROOT, "{ \"query\" : \"%s\"}", query)); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + return new JSONObject(getResponseBody(response)); + } + + @Override + public boolean shouldResetQuerySizeLimit() { + return false; + } + + @Override + protected synchronized void loadIndex(Index index) throws IOException { + String indexName = index.getName(); + String mapping = index.getMapping(); + // current directory becomes 'integ-test/build/testrun/sqlBwcCluster#' during bwc + String dataSet = "../../../" + index.getDataSet(); + + if (!isIndexExist(client(), indexName)) { + createIndexByRestClient(client(), indexName, mapping); + loadDataByRestClient(client(), indexName, dataSet); } - - private void 
updateLegacySQLSettings() throws IOException { - Request request = new Request("PUT", LEGACY_SQL_SETTINGS_API_ENDPOINT); - request.setJsonEntity(String.format(Locale.ROOT, "{\n" + - " \"persistent\" : {\n" + - " \"%s\" : \"%s\"\n" + - " }\n" + - "}", "opendistro.sql.cursor.keep_alive", "7m")); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - JSONObject jsonObject = new JSONObject(getResponseBody(response)); - Assert.assertTrue((boolean) jsonObject.get("acknowledged")); - } - - private void verifySQLSettings() throws IOException { - Request request = new Request("GET", "_cluster/settings?flat_settings"); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - JSONObject jsonObject = new JSONObject(getResponseBody(response)); - Assert.assertEquals("{\"transient\":{},\"persistent\":{\"opendistro.sql.cursor.keep_alive\":\"7m\"}}", jsonObject.toString()); - } - - private void verifySQLQueries(String endpoint) throws IOException { - JSONObject filterResponse = executeSQLQuery(endpoint, "SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); - verifySchema(filterResponse, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); - verifyDataRows(filterResponse, rows(238)); - - JSONObject aggResponse = executeSQLQuery(endpoint, "SELECT COUNT(DISTINCT age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); - verifySchema(aggResponse, schema("COUNT(DISTINCT age)", null, "integer")); - verifyDataRows(aggResponse, rows(21)); - - JSONObject groupByResponse = executeSQLQuery(endpoint, "select a.gender from " + TestsConstants.TEST_INDEX_ACCOUNT + " a group by a.gender having count(*) > 0"); - verifySchema(groupByResponse, schema("gender", null, "text")); - Assert.assertEquals("[[\"F\"],[\"M\"]]", groupByResponse.getJSONArray("datarows").toString()); - } - - private JSONObject executeSQLQuery(String endpoint, String query) throws IOException { - Request request = new Request("POST", endpoint); - request.setJsonEntity(String.format(Locale.ROOT, "{" + - " \"query\" : \"%s\"" + - "}", query)); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - return new JSONObject(getResponseBody(response)); - } - - @Override - public boolean shouldResetQuerySizeLimit() { - return false; - } - - @Override - protected synchronized void loadIndex(Index index) throws IOException { - String indexName = index.getName(); - String mapping = index.getMapping(); - // current directory becomes 'integ-test/build/testrun/sqlBwcCluster#' during bwc - String dataSet = "../../../" + index.getDataSet(); - - if (!isIndexExist(client(), indexName)) { - createIndexByRestClient(client(), indexName, mapping); - loadDataByRestClient(client(), indexName, dataSet); - } - } - + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java b/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java index 9ec80c55a7..329aed80aa 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness; import static org.opensearch.sql.util.TestUtils.getResourceFilePath; @@ -32,11 +31,12 @@ import org.opensearch.sql.correctness.testset.TestDataSet; import org.opensearch.test.OpenSearchIntegTestCase; -/** - * Correctness integration test by performing comparison test with other databases. - */ +/** Correctness integration test by performing comparison test with other databases. */ @OpenSearchIntegTestCase.SuiteScopeTestCase -@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 3, supportsDedicatedMasters = false) +@OpenSearchIntegTestCase.ClusterScope( + scope = OpenSearchIntegTestCase.Scope.SUITE, + numDataNodes = 3, + supportsDedicatedMasters = false) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) public class CorrectnessIT extends OpenSearchIntegTestCase { @@ -47,8 +47,8 @@ public void performComparisonTest() throws URISyntaxException { TestConfig config = new TestConfig(getCmdLineArgs()); LOG.info("Starting comparison test {}", config); - try (ComparisonTest test = new ComparisonTest(getThisDBConnection(config), - getOtherDBConnections(config))) { + try (ComparisonTest test = + new ComparisonTest(getThisDBConnection(config), getOtherDBConnections(config))) { LOG.info("Loading test data set..."); test.connect(); for (TestDataSet dataSet : config.getTestDataSets()) { @@ -81,9 +81,7 @@ private DBConnection getThisDBConnection(TestConfig config) throws URISyntaxExce return new JDBCConnection("DB Tested", dbUrl); } - /** - * Use OpenSearch cluster given on CLI arg or internal embedded in SQLIntegTestCase - */ + /** Use OpenSearch cluster given on CLI arg or internal embedded in SQLIntegTestCase */ private DBConnection getOpenSearchConnection(TestConfig config) throws URISyntaxException { RestClient client; String openSearchHost = config.getOpenSearchHostUrl(); @@ -96,14 +94,11 @@ private DBConnection getOpenSearchConnection(TestConfig config) throws URISyntax return new OpenSearchConnection("jdbc:opensearch://" + openSearchHost, client); } - /** - * Create database connection with database name and connect URL - */ + /** Create database connection with database name and connect URL */ private DBConnection[] getOtherDBConnections(TestConfig config) { - return config.getOtherDbConnectionNameAndUrls(). - entrySet().stream(). - map(e -> new JDBCConnection(e.getKey(), e.getValue())). - toArray(DBConnection[]::new); + return config.getOtherDbConnectionNameAndUrls().entrySet().stream() + .map(e -> new JDBCConnection(e.getKey(), e.getValue())) + .toArray(DBConnection[]::new); } private void store(TestReport report) { diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java b/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java index d344c29e20..16a172e384 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness; import static java.util.stream.Collectors.joining; @@ -19,11 +18,15 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** + * + * + *
  * Test configuration parse the following information from command line arguments:
  * 1) Test schema and data
  * 2) Test queries
  * 3) OpenSearch connection URL
  * 4) Other database connection URLs
+ * 
*/ public class TestConfig { @@ -37,9 +40,7 @@ public class TestConfig { private final String openSearchHostUrl; - /** - * Test against some database rather than OpenSearch via our JDBC driver - */ + /** Test against some database rather than OpenSearch via our JDBC driver */ private final String dbConnectionUrl; private final Map otherDbConnectionNameAndUrls = new HashMap<>(); @@ -75,12 +76,14 @@ public Map getOtherDbConnectionNameAndUrls() { private TestDataSet[] buildDefaultTestDataSet() { return new TestDataSet[] { - new TestDataSet("opensearch_dashboards_sample_data_flights", - readFile("opensearch_dashboards_sample_data_flights.json"), - readFile("opensearch_dashboards_sample_data_flights.csv")), - new TestDataSet("opensearch_dashboards_sample_data_ecommerce", - readFile("opensearch_dashboards_sample_data_ecommerce.json"), - readFile("opensearch_dashboards_sample_data_ecommerce.csv")), + new TestDataSet( + "opensearch_dashboards_sample_data_flights", + readFile("opensearch_dashboards_sample_data_flights.json"), + readFile("opensearch_dashboards_sample_data_flights.csv")), + new TestDataSet( + "opensearch_dashboards_sample_data_ecommerce", + readFile("opensearch_dashboards_sample_data_ecommerce.json"), + readFile("opensearch_dashboards_sample_data_ecommerce.csv")), }; } @@ -118,31 +121,33 @@ private static String readFile(String relativePath) { @Override public String toString() { return "\n=================================\n" - + "Tested Database : " + openSearchHostUrlToString() + '\n' - + "Other Databases :\n" + otherDbConnectionInfoToString() + '\n' - + "Test data set(s) :\n" + testDataSetsToString() + '\n' - + "Test query set : " + testQuerySet + '\n' - + "=================================\n"; + + "Tested Database : " + + openSearchHostUrlToString() + + "\nOther Databases :\n" + + otherDbConnectionInfoToString() + + "\nTest data set(s) :\n" + + testDataSetsToString() + + "\nTest query set : " + + testQuerySet + + "\n=================================\n"; } private String testDataSetsToString() { - return Arrays.stream(testDataSets). - map(TestDataSet::toString). - collect(joining("\n")); + return Arrays.stream(testDataSets).map(TestDataSet::toString).collect(joining("\n")); } private String openSearchHostUrlToString() { if (!dbConnectionUrl.isEmpty()) { return dbConnectionUrl; } - return openSearchHostUrl.isEmpty() ? "(Use internal OpenSearch in workspace)" : - openSearchHostUrl; + return openSearchHostUrl.isEmpty() + ? "(Use internal OpenSearch in workspace)" + : openSearchHostUrl; } private String otherDbConnectionInfoToString() { - return otherDbConnectionNameAndUrls.entrySet().stream(). - map(e -> StringUtils.format(" %s = %s", e.getKey(), e.getValue())). 
- collect(joining("\n")); + return otherDbConnectionNameAndUrls.entrySet().stream() + .map(e -> StringUtils.format(" %s = %s", e.getKey(), e.getValue())) + .collect(joining("\n")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java index cb13a01f98..1d69ff10ee 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.FAILURE; @@ -12,22 +11,17 @@ import lombok.Getter; import lombok.ToString; -/** - * Report for test case that ends with an error. - */ +/** Report for test case that ends with an error. */ @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) @Getter public class ErrorTestCase extends TestCaseReport { - /** - * Root cause of the error - */ + /** Root cause of the error */ private final String reason; public ErrorTestCase(int id, String sql, String reason) { super(id, sql, FAILURE); this.reason = reason; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java index 86693b98e9..2b5ab431e4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.FAILURE; @@ -16,30 +15,21 @@ import lombok.ToString; import org.opensearch.sql.correctness.runner.resultset.DBResult; -/** - * Report for test case that fails due to inconsistent result set. - */ +/** Report for test case that fails due to inconsistent result set. */ @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) @Getter public class FailedTestCase extends TestCaseReport { - /** - * Inconsistent result sets for reporting - */ + /** Inconsistent result sets for reporting */ private final List resultSets; - /** - * Explain where the difference is caused the test failure. - */ + /** Explain where the difference is caused the test failure. */ private final String explain; - /** - * Errors occurred for partial other databases. - */ + /** Errors occurred for partial other databases. 
*/ private final String errors; - public FailedTestCase(int id, String sql, List resultSets, String errors) { super(id, sql, FAILURE); this.resultSets = resultSets; @@ -47,10 +37,9 @@ public FailedTestCase(int id, String sql, List resultSets, String erro this.errors = errors; // Generate explanation by diff the first result with remaining - this.explain = resultSets.subList(1, resultSets.size()) - .stream() - .map(result -> resultSets.get(0).diff(result)) - .collect(Collectors.joining(", ")); + this.explain = + resultSets.subList(1, resultSets.size()).stream() + .map(result -> resultSets.get(0).diff(result)) + .collect(Collectors.joining(", ")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java index 62cd9b3fbe..8ec996e660 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.SUCCESS; @@ -12,9 +11,7 @@ import lombok.Getter; import lombok.ToString; -/** - * Report for successful test case result. - */ +/** Report for successful test case result. */ @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) @Getter diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java index 1a6285c52e..7567e9cd6a 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.SUCCESS; @@ -12,22 +11,19 @@ import lombok.Getter; import lombok.ToString; -/** - * Base class for different test result. - */ +/** Base class for different test result. */ @EqualsAndHashCode @ToString public abstract class TestCaseReport { public enum TestResult { - SUCCESS, FAILURE; + SUCCESS, + FAILURE; } - @Getter - private final int id; + @Getter private final int id; - @Getter - private final String sql; + @Getter private final String sql; private final TestResult result; @@ -40,5 +36,4 @@ public TestCaseReport(int id, String sql, TestResult result) { public String getResult() { return result == SUCCESS ? "Success" : "Failed"; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java index 88b23ccd5b..9b9b3b7a23 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import java.util.ArrayList; @@ -12,9 +11,7 @@ import lombok.Getter; import lombok.ToString; -/** - * Test report class to generate JSON report. - */ +/** Test report class to generate JSON report. 
*/ @EqualsAndHashCode @ToString @Getter @@ -37,5 +34,4 @@ public void addTestCase(TestCaseReport testCase) { summary.addFailure(); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java index 90767582b5..bbd4385460 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; -/** - * Test summary section. - */ +/** Test summary section. */ @EqualsAndHashCode @ToString @Getter @@ -33,5 +30,4 @@ public void addFailure() { failure++; total++; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java index 129bc70426..1fee41f1fe 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner; import static com.google.common.collect.ObjectArrays.concat; @@ -25,24 +24,16 @@ import org.opensearch.sql.correctness.testset.TestQuerySet; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Comparison test runner for query result correctness. - */ +/** Comparison test runner for query result correctness. */ public class ComparisonTest implements AutoCloseable { - /** - * Next id for test case - */ + /** Next id for test case */ private int testCaseId = 1; - /** - * Connection for database being tested - */ + /** Connection for database being tested */ private final DBConnection thisConnection; - /** - * Database connections for reference databases - */ + /** Database connections for reference databases */ private final DBConnection[] otherDbConnections; public ComparisonTest(DBConnection thisConnection, DBConnection[] otherDbConnections) { @@ -53,9 +44,7 @@ public ComparisonTest(DBConnection thisConnection, DBConnection[] otherDbConnect Arrays.sort(this.otherDbConnections, Comparator.comparing(DBConnection::getDatabaseName)); } - /** - * Open database connection. - */ + /** Open database connection. 
*/ public void connect() { for (DBConnection conn : concat(thisConnection, otherDbConnections)) { conn.connect(); @@ -87,8 +76,11 @@ public TestReport verify(TestQuerySet querySet) { DBResult openSearchResult = thisConnection.select(sql); report.addTestCase(compareWithOtherDb(sql, openSearchResult)); } catch (Exception e) { - report.addTestCase(new ErrorTestCase(nextId(), sql, - StringUtils.format("%s: %s", e.getClass().getSimpleName(), extractRootCause(e)))); + report.addTestCase( + new ErrorTestCase( + nextId(), + sql, + StringUtils.format("%s: %s", e.getClass().getSimpleName(), extractRootCause(e)))); } } return report; @@ -116,9 +108,7 @@ public void close() { } } - /** - * Execute the query and compare with current result - */ + /** Execute the query and compare with current result */ private TestCaseReport compareWithOtherDb(String sql, DBResult openSearchResult) { List mismatchResults = Lists.newArrayList(openSearchResult); StringBuilder reasons = new StringBuilder(); @@ -137,7 +127,8 @@ private TestCaseReport compareWithOtherDb(String sql, DBResult openSearchResult) } } - if (mismatchResults.size() == 1) { // Only OpenSearch result on list. Cannot find other database support this query + if (mismatchResults.size() + == 1) { // Only OpenSearch result on list. Cannot find other database support this query return new ErrorTestCase(nextId(), sql, "No other databases support this query: " + reasons); } return new FailedTestCase(nextId(), sql, mismatchResults, reasons.toString()); @@ -150,8 +141,8 @@ private int nextId() { private void insertTestDataInBatch(DBConnection conn, String tableName, List testData) { Iterator iterator = testData.iterator(); String[] fieldNames = (String[]) iterator.next(); // first row is header of column names - Iterators.partition(iterator, 100). - forEachRemaining(batch -> conn.insert(tableName, fieldNames, batch)); + Iterators.partition(iterator, 100) + .forEachRemaining(batch -> conn.insert(tableName, fieldNames, batch)); } private String extractRootCause(Throwable e) { @@ -167,5 +158,4 @@ private String extractRootCause(Throwable e) { } return e.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java index a475428735..b01762fd21 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.connection; import java.util.List; import org.opensearch.sql.correctness.runner.resultset.DBResult; -/** - * Abstraction for different databases. - */ +/** Abstraction for different databases. */ public interface DBConnection { /** @@ -19,25 +16,23 @@ public interface DBConnection { */ String getDatabaseName(); - /** - * Connect to database by opening a connection. - */ + /** Connect to database by opening a connection. */ void connect(); /** * Create table with the schema. * * @param tableName table name - * @param schema schema json in OpenSearch mapping format + * @param schema schema json in OpenSearch mapping format */ void create(String tableName, String schema); /** * Insert batch of data to database. 
* - * @param tableName table name + * @param tableName table name * @param columnNames column names - * @param batch batch of rows + * @param batch batch of rows */ void insert(String tableName, String[] columnNames, List batch); @@ -56,9 +51,6 @@ public interface DBConnection { */ void drop(String tableName); - /** - * Close the database connection. - */ + /** Close the database connection. */ void close(); - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java index d2d7d2aee6..7a67022117 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.connection; import static java.util.stream.Collectors.joining; @@ -23,33 +22,23 @@ import org.opensearch.sql.correctness.runner.resultset.Row; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Database connection by JDBC driver. - */ +/** Database connection by JDBC driver. */ public class JDBCConnection implements DBConnection { private static final String SINGLE_QUOTE = "'"; private static final String DOUBLE_QUOTE = "''"; private static final String BACKTICK = "`"; - /** - * Database name for display - */ + /** Database name for display */ private final String databaseName; - /** - * Database connection URL - */ + /** Database connection URL */ private final String connectionUrl; - /** - * JDBC driver config properties. - */ + /** JDBC driver config properties. */ private final Properties properties; - /** - * Current live connection - */ + /** Current live connection */ private Connection connection; public JDBCConnection(String databaseName, String connectionUrl) { @@ -58,9 +47,10 @@ public JDBCConnection(String databaseName, String connectionUrl) { /** * Create a JDBC connection with parameters given (but not connect to database at the moment). - * @param databaseName database name - * @param connectionUrl connection URL - * @param properties config properties + * + * @param databaseName database name + * @param connectionUrl connection URL + * @param properties config properties */ public JDBCConnection(String databaseName, String connectionUrl, Properties properties) { this.databaseName = databaseName; @@ -104,11 +94,11 @@ public void drop(String tableName) { @Override public void insert(String tableName, String[] columnNames, List batch) { try (Statement stmt = connection.createStatement()) { - String names = - Arrays.stream(columnNames).map(this::delimited).collect(joining(",")); + String names = Arrays.stream(columnNames).map(this::delimited).collect(joining(",")); for (Object[] fieldValues : batch) { - stmt.addBatch(StringUtils.format( - "INSERT INTO %s(%s) VALUES (%s)", tableName, names, getValueList(fieldValues))); + stmt.addBatch( + StringUtils.format( + "INSERT INTO %s(%s) VALUES (%s)", tableName, names, getValueList(fieldValues))); } stmt.executeBatch(); } catch (SQLException e) { @@ -120,8 +110,10 @@ public void insert(String tableName, String[] columnNames, List batch) public DBResult select(String query) { try (Statement stmt = connection.createStatement()) { ResultSet resultSet = stmt.executeQuery(query); - DBResult result = isOrderByQuery(query) - ? 
DBResult.resultInOrder(databaseName) : DBResult.result(databaseName); + DBResult result = + isOrderByQuery(query) + ? DBResult.resultInOrder(databaseName) + : DBResult.result(databaseName); populateMetaData(resultSet, result); populateData(resultSet, result); return result; @@ -140,20 +132,22 @@ public void close() { } /** - * Parse out type in schema json and convert to field name and type pairs for CREATE TABLE statement. + * Parse out type in schema json and convert to field name and type pairs for CREATE TABLE + * statement. */ private String parseColumnNameAndTypesInSchemaJson(String schema) { JSONObject json = (JSONObject) new JSONObject(schema).query("/mappings/properties"); - return json.keySet().stream(). - map(colName -> delimited(colName) + " " + mapToJDBCType(json.getJSONObject(colName) - .getString("type"))) + return json.keySet().stream() + .map( + colName -> + delimited(colName) + + " " + + mapToJDBCType(json.getJSONObject(colName).getString("type"))) .collect(joining(",")); } private String getValueList(Object[] fieldValues) { - return Arrays.stream(fieldValues). - map(this::convertValueObjectToString). - collect(joining(",")); + return Arrays.stream(fieldValues).map(this::convertValueObjectToString).collect(joining(",")); } private String convertValueObjectToString(Object value) { @@ -209,9 +203,7 @@ private boolean isOrderByQuery(String query) { return query.trim().toUpperCase().contains("ORDER BY"); } - /** - * Setter for unit test mock - */ + /** Setter for unit test mock */ public void setConnection(Connection connection) { this.connection = connection; } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java index 258c031b76..8a2240855f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.connection; import java.io.IOException; @@ -16,18 +15,15 @@ import org.opensearch.sql.correctness.runner.resultset.DBResult; /** - * OpenSearch database connection for insertion. This class wraps JDBCConnection to delegate query method. + * OpenSearch database connection for insertion. This class wraps JDBCConnection to delegate query + * method. */ public class OpenSearchConnection implements DBConnection { - /** - * Connection via our OpenSearch JDBC driver - */ + /** Connection via our OpenSearch JDBC driver */ private final DBConnection connection; - /** - * Native OpenSearch REST client for operation unsupported by driver such as CREATE/INSERT - */ + /** Native OpenSearch REST client for operation unsupported by driver such as CREATE/INSERT */ private final RestClient client; public OpenSearchConnection(String connectionUrl, RestClient client) { @@ -112,10 +108,8 @@ private String buildBulkBody(String[] columnNames, List batch) { } } - body.append("{\"index\":{}}\n"). 
- append(json).append("\n"); + body.append("{\"index\":{}}\n").append(json).append("\n"); } return body.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java index 52b7d26cc4..6ee3bb37f6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.resultset; import com.google.common.collect.HashMultiset; @@ -20,53 +19,44 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * Query result for equality comparison. Based on different type of query, such as query with/without ORDER BY and - * query with SELECT columns or just *, order of column and row may matter or not. So the internal data structure of this - * class is passed in from outside either list or set, hash map or linked hash map etc. + * Query result for equality comparison. Based on different type of query, such as query + * with/without ORDER BY and query with SELECT columns or just *, order of column and row may matter + * or not. So the internal data structure of this class is passed in from outside either list or + * set, hash map or linked hash map etc. */ @ToString public class DBResult { /** - * Possible types for floating point number + * Possible types for floating point number.
 * H2 2.x use DOUBLE PRECISION instead of DOUBLE. */ private static final Set<String> FLOAT_TYPES = ImmutableSet.of("FLOAT", "DOUBLE", "REAL", "DOUBLE PRECISION", "DECFLOAT"); /** - * Possible types for varchar. + * Possible types for varchar.
* H2 2.x use CHARACTER VARYING instead of VARCHAR. */ private static final Set VARCHAR = ImmutableSet.of("CHARACTER VARYING", "VARCHAR"); - /** - * Database name for display - */ + /** Database name for display */ private final String databaseName; - /** - * Column name and types from result set meta data - */ - @Getter - private final Collection schema; + /** Column name and types from result set meta data */ + @Getter private final Collection schema; - /** - * Data rows from result set - */ + /** Data rows from result set */ private final Collection dataRows; - /** - * In theory, a result set is a multi-set (bag) that allows duplicate and doesn't - * have order. - */ + /** In theory, a result set is a multi-set (bag) that allows duplicate and doesn't have order. */ public static DBResult result(String databaseName) { return new DBResult(databaseName, new ArrayList<>(), HashMultiset.create()); } /** - * But for queries with ORDER BY clause, we want to preserve the original order of data rows - * so we can check if the order is correct. + * But for queries with ORDER BY clause, we want to preserve the original order of data rows so we + * can check if the order is correct. */ public static DBResult resultInOrder(String databaseName) { return new DBResult(databaseName, new ArrayList<>(), new ArrayList<>()); @@ -103,21 +93,20 @@ public String getDatabaseName() { return databaseName; } - /** - * Flatten for simplifying json generated. - */ + /** Flatten for simplifying json generated. */ public Collection> getDataRows() { - Collection> values = isDataRowOrdered() - ? new ArrayList<>() : HashMultiset.create(); + Collection> values = + isDataRowOrdered() ? new ArrayList<>() : HashMultiset.create(); dataRows.stream().map(Row::getValues).forEach(values::add); return values; } /** - * Explain the difference between this and other DB result which is helpful for - * troubleshooting in final test report. - * @param other other DB result - * @return explain the difference + * Explain the difference between this and other DB result which is helpful for troubleshooting in + * final test report. + * + * @param other other DB result + * @return explain the difference */ public String diff(DBResult other) { String result = diffSchema(other); @@ -143,26 +132,27 @@ private String diffDataRows(DBResult other) { } /** - * Check if two lists are same otherwise explain if size or any element - * is different at some position. + * Check if two lists are same otherwise explain if size or any element is different at some + * position. */ private String diff(String name, List thisList, List otherList) { if (thisList.size() != otherList.size()) { - return StringUtils.format("%s size is different: this=[%d], other=[%d]", - name, thisList.size(), otherList.size()); + return StringUtils.format( + "%s size is different: this=[%d], other=[%d]", name, thisList.size(), otherList.size()); } int diff = findFirstDifference(thisList, otherList); if (diff >= 0) { - return StringUtils.format("%s at [%d] is different: this=[%s], other=[%s]", + return StringUtils.format( + "%s at [%d] is different: this=[%s], other=[%s]", name, diff, thisList.get(diff), otherList.get(diff)); } return ""; } /** - * Find first different element with assumption that the lists given have same size - * and there is no NULL element inside. + * Find first different element with assumption that the lists given have same size and there is + * no NULL element inside. 
*/ private static int findFirstDifference(List list1, List list2) { for (int i = 0; i < list1.size(); i++) { @@ -174,16 +164,14 @@ private static int findFirstDifference(List list1, List list2) { } /** - * Is data row a list that represent original order of data set - * which doesn't/shouldn't sort again. + * Is data row a list that represent original order of data set which doesn't/shouldn't sort + * again. */ private boolean isDataRowOrdered() { return (dataRows instanceof List); } - /** - * Convert a collection to list and sort and return this new list. - */ + /** Convert a collection to list and sort and return this new list. */ private static > List sort(Collection collection) { ArrayList list = new ArrayList<>(collection); Collections.sort(list); @@ -200,12 +188,16 @@ public boolean equals(final Object o) { final DBResult other = (DBResult) o; // H2 calculates the value before setting column name // for example, for query "select 1 + 1" it returns a column named "2" instead of "1 + 1" - boolean skipColumnNameCheck = databaseName.equalsIgnoreCase("h2") || other.databaseName.equalsIgnoreCase("h2"); + boolean skipColumnNameCheck = + databaseName.equalsIgnoreCase("h2") || other.databaseName.equalsIgnoreCase("h2"); if (!skipColumnNameCheck && !schema.equals(other.schema)) { return false; } - if (skipColumnNameCheck && !schema.stream().map(Type::getType).collect(Collectors.toList()) - .equals(other.schema.stream().map(Type::getType).collect(Collectors.toList()))) { + if (skipColumnNameCheck + && !schema.stream() + .map(Type::getType) + .collect(Collectors.toList()) + .equals(other.schema.stream().map(Type::getType).collect(Collectors.toList()))) { return false; } return dataRows.equals(other.dataRows); diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java index da08487a10..973ea76e71 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.resultset; import java.math.BigDecimal; @@ -15,9 +14,7 @@ import lombok.Getter; import lombok.ToString; -/** - * Row in result set. - */ +/** Row in result set. 
*/ @EqualsAndHashCode @ToString @Getter @@ -77,5 +74,4 @@ public int compareTo(Row other) { } return 0; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java index 23cc0e3347..d626f75ccb 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java @@ -3,25 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.resultset; import lombok.Data; -/** - * Column type in schema - */ +/** Column type in schema */ @Data public class Type { - /** - * Column name - */ + /** Column name */ private final String name; - /** - * Column type - */ + /** Column type */ private final String type; - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java index 03c3967544..5cab5b3175 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Arrays.asList; @@ -29,37 +28,37 @@ import org.opensearch.sql.correctness.runner.resultset.Type; import org.opensearch.sql.correctness.testset.TestQuerySet; -/** - * Tests for {@link ComparisonTest} - */ +/** Tests for {@link ComparisonTest} */ @RunWith(MockitoJUnitRunner.class) public class ComparisonTestTest { - @Mock - private DBConnection openSearchConnection; + @Mock private DBConnection openSearchConnection; - @Mock - private DBConnection otherDbConnection; + @Mock private DBConnection otherDbConnection; private ComparisonTest correctnessTest; @Before public void setUp() { when(otherDbConnection.getDatabaseName()).thenReturn("Other"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection} - ); + correctnessTest = + new ComparisonTest(openSearchConnection, new DBConnection[] {otherDbConnection}); } @Test public void testSuccess() { - when(openSearchConnection.select(anyString())).thenReturn( - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))) - ); - when(otherDbConnection.select(anyString())).thenReturn( - new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("John")))) - ); + when(openSearchConnection.select(anyString())) + .thenReturn( + new DBResult( + "OpenSearch", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); + when(otherDbConnection.select(anyString())) + .thenReturn( + new DBResult( + "Other DB", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); TestReport expected = new TestReport(); expected.addTestCase(new SuccessTestCase(1, "SELECT * FROM accounts")); @@ -70,15 +69,18 @@ public void testSuccess() { @Test public void testFailureDueToInconsistency() { DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); - DBResult otherDbResult = new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("JOHN")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new 
Row(asList("John")))); + DBResult otherDbResult = + new DBResult( + "Other DB", asList(new Type("firstname", "text")), asList(new Row(asList("JOHN")))); when(openSearchConnection.select(anyString())).thenReturn(openSearchResult); when(otherDbConnection.select(anyString())).thenReturn(otherDbResult); TestReport expected = new TestReport(); expected.addTestCase( - new FailedTestCase(1, "SELECT * FROM accounts", asList(openSearchResult, otherDbResult), "")); + new FailedTestCase( + 1, "SELECT * FROM accounts", asList(openSearchResult, otherDbResult), "")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -87,16 +89,19 @@ public void testFailureDueToInconsistency() { public void testSuccessFinally() { DBConnection anotherDbConnection = mock(DBConnection.class); when(anotherDbConnection.getDatabaseName()).thenReturn("Another"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection} - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection}); DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); - DBResult otherDbResult = new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("JOHN")))); - DBResult anotherDbResult = new DBResult("Another DB", asList(new Type("firstname", "text")), - asList(new Row(asList("John")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + DBResult otherDbResult = + new DBResult( + "Other DB", asList(new Type("firstname", "text")), asList(new Row(asList("JOHN")))); + DBResult anotherDbResult = + new DBResult( + "Another DB", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); when(openSearchConnection.select(anyString())).thenReturn(openSearchResult); when(anotherDbConnection.select(anyString())).thenReturn(anotherDbResult); @@ -111,30 +116,38 @@ public void testFailureDueToEventualInconsistency() { DBConnection anotherDbConnection = mock(DBConnection.class); when(anotherDbConnection.getDatabaseName()) .thenReturn("ZZZ DB"); // Make sure this will be called after Other DB - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection} - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection}); DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); - DBResult otherDbResult = new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("JOHN")))); - DBResult anotherDbResult = new DBResult("ZZZ DB", asList(new Type("firstname", "text")), - asList(new Row(asList("Hank")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + DBResult otherDbResult = + new DBResult( + "Other DB", asList(new Type("firstname", "text")), asList(new Row(asList("JOHN")))); + DBResult anotherDbResult = + new DBResult( + "ZZZ DB", asList(new Type("firstname", "text")), asList(new Row(asList("Hank")))); when(openSearchConnection.select(anyString())).thenReturn(openSearchResult); when(otherDbConnection.select(anyString())).thenReturn(otherDbResult); when(anotherDbConnection.select(anyString())).thenReturn(anotherDbResult); 
TestReport expected = new TestReport(); - expected.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", - asList(openSearchResult, otherDbResult, anotherDbResult), "")); + expected.addTestCase( + new FailedTestCase( + 1, + "SELECT * FROM accounts", + asList(openSearchResult, otherDbResult, anotherDbResult), + "")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @Test public void testErrorDueToESException() { - when(openSearchConnection.select(anyString())).thenThrow(new RuntimeException("All shards failure")); + when(openSearchConnection.select(anyString())) + .thenThrow(new RuntimeException("All shards failure")); TestReport expected = new TestReport(); expected.addTestCase( @@ -145,15 +158,21 @@ public void testErrorDueToESException() { @Test public void testErrorDueToNoOtherDBSupportThisQuery() { - when(openSearchConnection.select(anyString())).thenReturn( - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))) - ); + when(openSearchConnection.select(anyString())) + .thenReturn( + new DBResult( + "OpenSearch", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); when(otherDbConnection.select(anyString())) .thenThrow(new RuntimeException("Unsupported feature")); TestReport expected = new TestReport(); - expected.addTestCase(new ErrorTestCase(1, "SELECT * FROM accounts", - "No other databases support this query: Unsupported feature;")); + expected.addTestCase( + new ErrorTestCase( + 1, + "SELECT * FROM accounts", + "No other databases support this query: Unsupported feature;")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -162,17 +181,22 @@ public void testErrorDueToNoOtherDBSupportThisQuery() { public void testSuccessWhenOneDBSupportThisQuery() { DBConnection anotherDbConnection = mock(DBConnection.class); when(anotherDbConnection.getDatabaseName()).thenReturn("Another"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection} - ); - - when(openSearchConnection.select(anyString())).thenReturn( - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))) - ); - when(anotherDbConnection.select(anyString())).thenReturn( - new DBResult("Another DB", asList(new Type("firstname", "text")), - asList(new Row(asList("John")))) - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection}); + + when(openSearchConnection.select(anyString())) + .thenReturn( + new DBResult( + "OpenSearch", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); + when(anotherDbConnection.select(anyString())) + .thenReturn( + new DBResult( + "Another DB", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); TestReport expected = new TestReport(); expected.addTestCase(new SuccessTestCase(1, "SELECT * FROM accounts")); @@ -184,12 +208,13 @@ public void testSuccessWhenOneDBSupportThisQuery() { public void testFailureDueToInconsistencyAndExceptionMixed() { DBConnection otherDBConnection2 = mock(DBConnection.class); when(otherDBConnection2.getDatabaseName()).thenReturn("ZZZ DB"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, otherDBConnection2} - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] 
{otherDbConnection, otherDBConnection2}); DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); DBResult otherResult = new DBResult("Other", asList(new Type("firstname", "text")), Collections.emptyList()); @@ -199,8 +224,12 @@ public void testFailureDueToInconsistencyAndExceptionMixed() { .thenThrow(new RuntimeException("Unsupported feature")); TestReport expected = new TestReport(); - expected.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", - asList(openSearchResult, otherResult), "Unsupported feature;")); + expected.addTestCase( + new FailedTestCase( + 1, + "SELECT * FROM accounts", + asList(openSearchResult, otherResult), + "Unsupported feature;")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -208,5 +237,4 @@ public void testFailureDueToInconsistencyAndExceptionMixed() { private TestQuerySet querySet(String query) { return new TestQuerySet(new String[] {query}); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java index 3f6da0c39d..793728a9e9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Collections.emptyList; @@ -19,9 +18,7 @@ import org.opensearch.sql.correctness.runner.resultset.Row; import org.opensearch.sql.correctness.runner.resultset.Type; -/** - * Unit tests for {@link DBResult} - */ +/** Unit tests for {@link DBResult} */ public class DBResultTest { @Test @@ -80,35 +77,45 @@ public void dbResultWithDifferentColumnTypeShouldNotEqual() { @Test public void shouldExplainColumnTypeDifference() { - DBResult result1 = new DBResult("DB 1", - Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "FLOAT")), emptyList()); - DBResult result2 = new DBResult("DB 2", - Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "INT")), emptyList()); + DBResult result1 = + new DBResult( + "DB 1", + Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "FLOAT")), + emptyList()); + DBResult result2 = + new DBResult( + "DB 2", + Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "INT")), + emptyList()); assertEquals( "Schema type at [1] is different: " + "this=[Type(name=age, type=FLOAT)], other=[Type(name=age, type=INT)]", - result1.diff(result2) - ); + result1.diff(result2)); } @Test public void shouldExplainDataRowsDifference() { - DBResult result1 = new DBResult("DB 1", Arrays.asList(new Type("name", "VARCHAR")), - Sets.newHashSet( - new Row(Arrays.asList("hello")), - new Row(Arrays.asList("world")), - new Row(Lists.newArrayList((Object) null)))); - DBResult result2 = new DBResult("DB 2",Arrays.asList(new Type("name", "VARCHAR")), - Sets.newHashSet( - new Row(Lists.newArrayList((Object) null)), - new Row(Arrays.asList("hello")), - new Row(Arrays.asList("world123")))); + DBResult result1 = + new DBResult( + "DB 1", + Arrays.asList(new Type("name", "VARCHAR")), + Sets.newHashSet( + new Row(Arrays.asList("hello")), + new Row(Arrays.asList("world")), + new Row(Lists.newArrayList((Object) null)))); + DBResult result2 = + new DBResult( + 
"DB 2", + Arrays.asList(new Type("name", "VARCHAR")), + Sets.newHashSet( + new Row(Lists.newArrayList((Object) null)), + new Row(Arrays.asList("hello")), + new Row(Arrays.asList("world123")))); assertEquals( "Data row at [1] is different: this=[Row(values=[world])], other=[Row(values=[world123])]", - result1.diff(result2) - ); + result1.diff(result2)); } @Test @@ -125,8 +132,6 @@ public void shouldExplainDataRowsOrderDifference() { assertEquals( "Data row at [0] is different: this=[Row(values=[hello])], other=[Row(values=[world])]", - result1.diff(result2) - ); + result1.diff(result2)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java index 0e70066136..a8e01145e7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.junit.Assert.assertEquals; @@ -36,17 +35,13 @@ import org.opensearch.sql.correctness.runner.resultset.DBResult; import org.opensearch.sql.correctness.runner.resultset.Type; -/** - * Tests for {@link JDBCConnection} - */ +/** Tests for {@link JDBCConnection} */ @RunWith(MockitoJUnitRunner.class) public class JDBCConnectionTest { - @Mock - private Connection connection; + @Mock private Connection connection; - @Mock - private Statement statement; + @Mock private Statement statement; private JDBCConnection conn; @@ -60,7 +55,8 @@ public void setUp() throws SQLException { @Test public void testCreateTable() throws SQLException { - conn.create("test", + conn.create( + "test", "{\"mappings\":{\"properties\":{\"name\":{\"type\":\"keyword\"},\"age\":{\"type\":\"INT\"}}}}"); ArgumentCaptor argCap = ArgumentCaptor.forClass(String.class); @@ -83,7 +79,9 @@ public void testDropTable() throws SQLException { @Test public void testInsertData() throws SQLException { - conn.insert("test", new String[] {"name", "age"}, + conn.insert( + "test", + new String[] {"name", "age"}, Arrays.asList(new String[] {"John", "25"}, new String[] {"Hank", "30"})); ArgumentCaptor argCap = ArgumentCaptor.forClass(String.class); @@ -93,18 +91,17 @@ public void testInsertData() throws SQLException { assertEquals( Arrays.asList( "INSERT INTO test(`name`,`age`) VALUES ('John','25')", - "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')" - ), actual - ); + "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')"), + actual); } @Test public void testInsertNullData() throws SQLException { - conn.insert("test", new String[] {"name", "age"}, + conn.insert( + "test", + new String[] {"name", "age"}, Arrays.asList( - new Object[] {"John", null}, - new Object[] {null, 25}, - new Object[] {"Hank", 30})); + new Object[] {"John", null}, new Object[] {null, 25}, new Object[] {"Hank", 30})); ArgumentCaptor argCap = ArgumentCaptor.forClass(String.class); verify(statement, times(3)).addBatch(argCap.capture()); @@ -114,9 +111,8 @@ public void testInsertNullData() throws SQLException { Arrays.asList( "INSERT INTO test(`name`,`age`) VALUES ('John',NULL)", "INSERT INTO test(`name`,`age`) VALUES (NULL,'25')", - "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')" - ), actual - ); + "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')"), + actual); } @Test @@ -129,19 +125,10 @@ public void testSelectQuery() throws SQLException { DBResult 
result = conn.select("SELECT * FROM test"); assertEquals("Test DB", result.getDatabaseName()); assertEquals( - Arrays.asList( - new Type("NAME", "VARCHAR"), - new Type("AGE", "INT") - ), - result.getSchema() - ); + Arrays.asList(new Type("NAME", "VARCHAR"), new Type("AGE", "INT")), result.getSchema()); assertEquals( - HashMultiset.create(ImmutableList.of( - Arrays.asList("John", 25), - Arrays.asList("Hank", 30) - )), - result.getDataRows() - ); + HashMultiset.create(ImmutableList.of(Arrays.asList("John", 25), Arrays.asList("Hank", 30))), + result.getDataRows()); } @Test @@ -153,24 +140,18 @@ public void testSelectQueryWithAlias() throws SQLException { when(resultSet.getMetaData()).thenReturn(metaData); DBResult result = conn.select("SELECT * FROM test"); - assertEquals( - Arrays.asList( - new Type("N", "VARCHAR"), - new Type("A", "INT") - ), - result.getSchema() - ); + assertEquals(Arrays.asList(new Type("N", "VARCHAR"), new Type("A", "INT")), result.getSchema()); } @Test public void testSelectQueryWithFloatInResultSet() throws SQLException { ResultSetMetaData metaData = mockMetaData(ImmutableMap.of("name", "VARCHAR", "balance", "FLOAT")); - ResultSet resultSet = mockResultSet( - new Object[] {"John", 25.123}, - new Object[] {"Hank", 30.456}, - new Object[] {"Allen", 15.1} - ); + ResultSet resultSet = + mockResultSet( + new Object[] {"John", 25.123}, + new Object[] {"Hank", 30.456}, + new Object[] {"Allen", 15.1}); when(statement.executeQuery(anyString())).thenReturn(resultSet); when(resultSet.getMetaData()).thenReturn(metaData); @@ -178,18 +159,15 @@ public void testSelectQueryWithFloatInResultSet() throws SQLException { assertEquals( Arrays.asList( new Type("NAME", "VARCHAR"), - new Type("BALANCE", "[FLOAT, DOUBLE, REAL, DOUBLE PRECISION, DECFLOAT]") - ), - result.getSchema() - ); + new Type("BALANCE", "[FLOAT, DOUBLE, REAL, DOUBLE PRECISION, DECFLOAT]")), + result.getSchema()); assertEquals( - HashMultiset.create(ImmutableList.of( - Arrays.asList("John", 25.13), - Arrays.asList("Hank", 30.46), - Arrays.asList("Allen", 15.1) - )), - result.getDataRows() - ); + HashMultiset.create( + ImmutableList.of( + Arrays.asList("John", 25.13), + Arrays.asList("Hank", 30.46), + Arrays.asList("Allen", 15.1))), + result.getDataRows()); } private ResultSet mockResultSet(Object[]... rows) throws SQLException { @@ -233,5 +211,4 @@ private ResultSetMetaData mockMetaData(Map nameAndTypes, String. 
when(metaData.getColumnCount()).thenReturn(nameAndTypes.size()); return metaData; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java index 5b33884814..49d8f01651 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.junit.Assert.assertEquals; @@ -30,14 +29,11 @@ import org.opensearch.client.RestClient; import org.opensearch.sql.correctness.runner.connection.OpenSearchConnection; -/** - * Tests for {@link OpenSearchConnection} - */ +/** Tests for {@link OpenSearchConnection} */ @RunWith(MockitoJUnitRunner.class) public class OpenSearchConnectionTest { - @Mock - private RestClient client; + @Mock private RestClient client; private OpenSearchConnection conn; @@ -63,36 +59,29 @@ public void testCreateTable() throws IOException { @Test public void testInsertData() throws IOException { - conn.insert("test", new String[] {"name"}, - Arrays.asList(new String[] {"John"}, new String[] {"Hank"})); + conn.insert( + "test", new String[] {"name"}, Arrays.asList(new String[] {"John"}, new String[] {"Hank"})); Request actual = captureActualArg(); assertEquals("POST", actual.getMethod()); assertEquals("/test/_bulk?refresh=true", actual.getEndpoint()); assertEquals( - "{\"index\":{}}\n" - + "{\"name\":\"John\"}\n" - + "{\"index\":{}}\n" - + "{\"name\":\"Hank\"}\n", - getBody(actual) - ); + "{\"index\":{}}\n{\"name\":\"John\"}\n{\"index\":{}}\n{\"name\":\"Hank\"}\n", + getBody(actual)); } @Test public void testInsertNullData() throws IOException { - conn.insert("test", new String[] {"name", "age"}, + conn.insert( + "test", + new String[] {"name", "age"}, Arrays.asList(new Object[] {null, 30}, new Object[] {"Hank", null})); Request actual = captureActualArg(); assertEquals("POST", actual.getMethod()); assertEquals("/test/_bulk?refresh=true", actual.getEndpoint()); assertEquals( - "{\"index\":{}}\n" - + "{\"age\":30}\n" - + "{\"index\":{}}\n" - + "{\"name\":\"Hank\"}\n", - getBody(actual) - ); + "{\"index\":{}}\n{\"age\":30}\n{\"index\":{}}\n{\"name\":\"Hank\"}\n", getBody(actual)); } @Test @@ -114,5 +103,4 @@ private String getBody(Request request) throws IOException { InputStream inputStream = request.getEntity().getContent(); return CharStreams.toString(new InputStreamReader(inputStream)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java index 66cc1a0500..79e134fe7b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.junit.Assert.assertEquals; @@ -12,9 +11,7 @@ import org.junit.Test; import org.opensearch.sql.correctness.runner.resultset.Row; -/** - * Unit test {@link Row} - */ +/** Unit test {@link Row} */ public class RowTest { @Test @@ -47,5 +44,4 @@ public void shouldConsiderNullGreater() { row2.add("world"); assertEquals(1, row1.compareTo(row2)); } - } diff --git 
a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java index 1abe6ea109..daf084d371 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Collections.emptyMap; @@ -18,9 +17,7 @@ import org.junit.Test; import org.opensearch.sql.correctness.TestConfig; -/** - * Tests for {@link TestConfig} - */ +/** Tests for {@link TestConfig} */ public class TestConfigTest { @Test @@ -31,9 +28,7 @@ public void testDefaultConfig() { config.getOtherDbConnectionNameAndUrls(), allOf( hasEntry("H2", "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1"), - hasEntry("SQLite", "jdbc:sqlite::memory:") - ) - ); + hasEntry("SQLite", "jdbc:sqlite::memory:"))); } @Test @@ -45,18 +40,16 @@ public void testCustomESUrls() { @Test public void testCustomDbUrls() { - Map args = ImmutableMap.of("otherDbUrls", - "H2=jdbc:h2:mem:test;DB_CLOSE_DELAY=-1," - + "Derby=jdbc:derby:memory:myDb;create=true"); + Map args = + ImmutableMap.of( + "otherDbUrls", + "H2=jdbc:h2:mem:test;DB_CLOSE_DELAY=-1,Derby=jdbc:derby:memory:myDb;create=true"); TestConfig config = new TestConfig(args); assertThat( config.getOtherDbConnectionNameAndUrls(), allOf( hasEntry("H2", "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1"), - hasEntry("Derby", "jdbc:derby:memory:myDb;create=true") - ) - ); + hasEntry("Derby", "jdbc:derby:memory:myDb;create=true"))); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java index 3967d96658..7411df6a54 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.hamcrest.MatcherAssert.assertThat; @@ -13,9 +12,7 @@ import org.junit.Test; import org.opensearch.sql.correctness.testset.TestDataSet; -/** - * Tests for {@link TestDataSet} - */ +/** Tests for {@link TestDataSet} */ public class TestDataSetTest { @Test @@ -40,9 +37,7 @@ public void testDataSetWithSingleColumnData() { new Object[] {"field"}, new Object[] {"hello"}, new Object[] {"world"}, - new Object[] {"123"} - ) - ); + new Object[] {"123"})); } @Test @@ -61,16 +56,13 @@ public void testDataSetWithMultiColumnsData() { + " }\n" + "}"; - TestDataSet dataSet = new TestDataSet("test", mappings, - "field1,field2\nhello,123\nworld,456"); + TestDataSet dataSet = new TestDataSet("test", mappings, "field1,field2\nhello,123\nworld,456"); assertThat( dataSet.getDataRows(), contains( new Object[] {"field1", "field2"}, new Object[] {"hello", 123}, - new Object[] {"world", 456} - ) - ); + new Object[] {"world", 456})); } @Test @@ -86,17 +78,15 @@ public void testDataSetWithEscapedComma() { + " }\n" + "}"; - TestDataSet dataSet = new TestDataSet("test", mappings, - "field\n\"hello,world,123\"\n123\n\"[abc,def,ghi]\""); + TestDataSet dataSet = + new TestDataSet("test", mappings, "field\n\"hello,world,123\"\n123\n\"[abc,def,ghi]\""); assertThat( dataSet.getDataRows(), contains( new Object[] {"field"}, new Object[] {"hello,world,123"}, new Object[] {"123"}, - new Object[] 
{"[abc,def,ghi]"} - ) - ); + new Object[] {"[abc,def,ghi]"})); } @Test @@ -115,17 +105,13 @@ public void testDataSetWithNullData() { + " }\n" + "}"; - TestDataSet dataSet = new TestDataSet("test", mappings, - "field1,field2\n,123\nworld,\n,"); + TestDataSet dataSet = new TestDataSet("test", mappings, "field1,field2\n,123\nworld,\n,"); assertThat( dataSet.getDataRows(), contains( new Object[] {"field1", "field2"}, new Object[] {null, 123}, new Object[] {"world", null}, - new Object[] {null, null} - ) - ); + new Object[] {null, null})); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java index 1c97f743f4..08d360dfc7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.hamcrest.MatcherAssert.assertThat; @@ -12,22 +11,13 @@ import org.junit.Test; import org.opensearch.sql.correctness.testset.TestQuerySet; -/** - * Tests for {@link TestQuerySet} - */ +/** Tests for {@link TestQuerySet} */ public class TestQuerySetTest { @Test public void testQuerySet() { TestQuerySet querySet = new TestQuerySet("SELECT * FROM accounts\nSELECT * FROM accounts LIMIT 5"); - assertThat( - querySet, - contains( - "SELECT * FROM accounts", - "SELECT * FROM accounts LIMIT 5" - ) - ); + assertThat(querySet, contains("SELECT * FROM accounts", "SELECT * FROM accounts LIMIT 5")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java index 35b64fd5d6..9ac5151b21 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Arrays.asList; @@ -20,9 +19,7 @@ import org.opensearch.sql.correctness.runner.resultset.Row; import org.opensearch.sql.correctness.runner.resultset.Type; -/** - * Test for {@link TestReport} - */ +/** Test for {@link TestReport} */ public class TestReportTest { private TestReport report = new TestReport(); @@ -31,22 +28,22 @@ public class TestReportTest { public void testSuccessReport() { report.addTestCase(new SuccessTestCase(1, "SELECT * FROM accounts")); JSONObject actual = new JSONObject(report); - JSONObject expected = new JSONObject( - "{" + - " \"summary\": {" + - " \"total\": 1," + - " \"success\": 1," + - " \"failure\": 0" + - " }," + - " \"tests\": [" + - " {" + - " \"id\": 1," + - " \"result\": 'Success'," + - " \"sql\": \"SELECT * FROM accounts\"," + - " }" + - " ]" + - "}" - ); + JSONObject expected = + new JSONObject( + "{" + + " \"summary\": {" + + " \"total\": 1," + + " \"success\": 1," + + " \"failure\": 0" + + " }," + + " \"tests\": [" + + " {" + + " \"id\": 1," + + " \"result\": 'Success'," + + " \"sql\": \"SELECT * FROM accounts\"," + + " }" + + " ]" + + "}"); if (!actual.similar(expected)) { fail("Actual JSON is different from expected: " + actual.toString(2)); @@ -55,54 +52,63 @@ public void testSuccessReport() { @Test public void testFailedReport() { - report.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", 
asList( - new DBResult("OpenSearch", singleton(new Type("firstName", "text")), - singleton(new Row(asList("hello")))), - new DBResult("H2", singleton(new Type("firstName", "text")), - singleton(new Row(asList("world"))))), - "[SQLITE_ERROR] SQL error or missing database;" - )); + report.addTestCase( + new FailedTestCase( + 1, + "SELECT * FROM accounts", + asList( + new DBResult( + "OpenSearch", + singleton(new Type("firstName", "text")), + singleton(new Row(asList("hello")))), + new DBResult( + "H2", + singleton(new Type("firstName", "text")), + singleton(new Row(asList("world"))))), + "[SQLITE_ERROR] SQL error or missing database;")); JSONObject actual = new JSONObject(report); - JSONObject expected = new JSONObject( - "{" + - " \"summary\": {" + - " \"total\": 1," + - " \"success\": 0," + - " \"failure\": 1" + - " }," + - " \"tests\": [" + - " {" + - " \"id\": 1," + - " \"result\": 'Failed'," + - " \"sql\": \"SELECT * FROM accounts\"," + - " \"explain\": \"Data row at [0] is different: this=[Row(values=[world])], other=[Row(values=[hello])]\"," + - " \"errors\": \"[SQLITE_ERROR] SQL error or missing database;\"," + - " \"resultSets\": [" + - " {" + - " \"database\": \"H2\"," + - " \"schema\": [" + - " {" + - " \"name\": \"firstName\"," + - " \"type\": \"text\"" + - " }" + - " ]," + - " \"dataRows\": [[\"world\"]]" + - " }," + - " {" + - " \"database\": \"OpenSearch\"," + - " \"schema\": [" + - " {" + - " \"name\": \"firstName\"," + - " \"type\": \"text\"" + - " }" + - " ]," + - " \"dataRows\": [[\"hello\"]]" + - " }" + - " ]" + - " }" + - " ]" + - "}" - ); + JSONObject expected = + new JSONObject( + "{" + + " \"summary\": {" + + " \"total\": 1," + + " \"success\": 0," + + " \"failure\": 1" + + " }," + + " \"tests\": [" + + " {" + + " \"id\": 1," + + " \"result\": 'Failed'," + + " \"sql\": \"SELECT * FROM accounts\"," + + " \"explain\": \"Data row at [0] is different: " + + "this=[Row(values=[world])], " + + "other=[Row(values=[hello])]\"," + + " \"errors\": \"[SQLITE_ERROR] SQL error or missing database;\"," + + " \"resultSets\": [" + + " {" + + " \"database\": \"H2\"," + + " \"schema\": [" + + " {" + + " \"name\": \"firstName\"," + + " \"type\": \"text\"" + + " }" + + " ]," + + " \"dataRows\": [[\"world\"]]" + + " }," + + " {" + + " \"database\": \"OpenSearch\"," + + " \"schema\": [" + + " {" + + " \"name\": \"firstName\"," + + " \"type\": \"text\"" + + " }" + + " ]," + + " \"dataRows\": [[\"hello\"]]" + + " }" + + " ]" + + " }" + + " ]" + + "}"); if (!actual.similar(expected)) { fail("Actual JSON is different from expected: " + actual.toString(2)); @@ -113,27 +119,26 @@ public void testFailedReport() { public void testErrorReport() { report.addTestCase(new ErrorTestCase(1, "SELECT * FROM", "Missing table name in query")); JSONObject actual = new JSONObject(report); - JSONObject expected = new JSONObject( - "{" + - " \"summary\": {" + - " \"total\": 1," + - " \"success\": 0," + - " \"failure\": 1" + - " }," + - " \"tests\": [" + - " {" + - " \"id\": 1," + - " \"result\": 'Failed'," + - " \"sql\": \"SELECT * FROM\"," + - " \"reason\": \"Missing table name in query\"," + - " }" + - " ]" + - "}" - ); + JSONObject expected = + new JSONObject( + "{" + + " \"summary\": {" + + " \"total\": 1," + + " \"success\": 0," + + " \"failure\": 1" + + " }," + + " \"tests\": [" + + " {" + + " \"id\": 1," + + " \"result\": 'Failed'," + + " \"sql\": \"SELECT * FROM\"," + + " \"reason\": \"Missing table name in query\"," + + " }" + + " ]" + + "}"); if (!actual.similar(expected)) { fail("Actual JSON is 
different from expected: " + actual.toString(2)); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java index 0bc5456069..367e2e10bf 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import org.junit.runner.RunWith; @@ -11,15 +10,14 @@ @RunWith(Suite.class) @Suite.SuiteClasses({ - ComparisonTestTest.class, - TestConfigTest.class, - TestDataSetTest.class, - TestQuerySetTest.class, - TestReportTest.class, - OpenSearchConnectionTest.class, - JDBCConnectionTest.class, - DBResultTest.class, - RowTest.class, + ComparisonTestTest.class, + TestConfigTest.class, + TestDataSetTest.class, + TestQuerySetTest.class, + TestReportTest.class, + OpenSearchConnectionTest.class, + JDBCConnectionTest.class, + DBResultTest.class, + RowTest.class, }) -public class UnitTests { -} +public class UnitTests {} diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java index 66fc7c88af..3872834b26 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.testset; import static java.util.stream.Collectors.joining; @@ -15,9 +14,7 @@ import org.json.JSONObject; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Test data set - */ +/** Test data set */ public class TestDataSet { private final String tableName; @@ -42,9 +39,7 @@ public List getDataRows() { return dataRows; } - /** - * Split columns in each line by separator and ignore escaped separator(s) in quoted string. - */ + /** Split columns in each line by separator and ignore escaped separator(s) in quoted string. */ private List splitColumns(String content, char separator) { List result = new ArrayList<>(); for (String line : content.split("\\r?\\n")) { @@ -76,8 +71,8 @@ private List splitColumns(String content, char separator) { } /** - * Convert column string values (read from CSV file) to objects of its real type - * based on the type information in index mapping file. + * Convert column string values (read from CSV file) to objects of its real type based on the type + * information in index mapping file. 
*/ private List convertStringDataToActualType(List rows) { JSONObject types = new JSONObject(schema); @@ -93,7 +88,8 @@ private List convertStringDataToActualType(List rows) { return result; } - private Object[] convertStringArrayToObjectArray(JSONObject types, String[] columnNames, String[] row) { + private Object[] convertStringArrayToObjectArray( + JSONObject types, String[] columnNames, String[] row) { Object[] result = new Object[row.length]; for (int i = 0; i < row.length; i++) { String colName = columnNames[i]; @@ -126,22 +122,17 @@ private Object convertStringToObject(String type, String str) { case "boolean": return Boolean.valueOf(str); default: - throw new IllegalStateException(StringUtils.format( - "Data type %s is not supported yet for value: %s", type, str)); + throw new IllegalStateException( + StringUtils.format("Data type %s is not supported yet for value: %s", type, str)); } } @Override public String toString() { int total = dataRows.size(); - return "Test data set :\n" - + " Table name: " + tableName + '\n' - + " Schema: " + schema + '\n' - + " Data rows (first 5 in " + total + "):" - + dataRows.stream(). - limit(5). - map(Arrays::toString). - collect(joining("\n ", "\n ", "\n")); + return String.format( + "Test data set:\n Table name: %s\n Schema: %s\n Data rows (first 5 in %d):", + tableName, schema, total) + + dataRows.stream().limit(5).map(Arrays::toString).collect(joining("\n ", "\n ", "\n")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java index 7eee2cde9f..161d314c1d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.testset; import static java.util.stream.Collectors.joining; @@ -12,9 +11,7 @@ import java.util.Iterator; import java.util.List; -/** - * Test query set including SQL queries for comparison testing. - */ +/** Test query set including SQL queries for comparison testing. */ public class TestQuerySet implements Iterable { private List queries; @@ -49,10 +46,9 @@ private List lines(String content) { @Override public String toString() { int total = queries.size(); - return "SQL queries (first 5 in " + total + "):" - + queries.stream(). - limit(5). 
- collect(joining("\n ", "\n ", "\n")); + return "SQL queries (first 5 in " + + total + + "):" + + queries.stream().limit(5).collect(joining("\n ", "\n ", "\n")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java b/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java index 86af85727d..e1d071d522 100644 --- a/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java @@ -47,130 +47,155 @@ protected static void deleteDataSourcesCreated() throws IOException { @SneakyThrows @Test public void createDataSourceAPITest() { - //create datasource + // create datasource DataSourceMetadata createDSM = - new DataSourceMetadata("create_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://localhost:9090", - "prometheus.auth.type","basicauth", - "prometheus.auth.username", "username", - "prometheus.auth.password", "password")); + new DataSourceMetadata( + "create_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of( + "prometheus.uri", + "https://localhost:9090", + "prometheus.auth.type", + "basicauth", + "prometheus.auth.username", + "username", + "prometheus.auth.password", + "password")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); String createResponseString = getResponseBody(response); Assert.assertEquals("Created DataSource with name create_prometheus", createResponseString); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. Thread.sleep(2000); - //get datasource to validate the creation. + // get datasource to validate the creation. Request getRequest = getFetchDataSourceRequest("create_prometheus"); Response getResponse = client().performRequest(getRequest); Assert.assertEquals(200, getResponse.getStatusLine().getStatusCode()); String getResponseString = getResponseBody(getResponse); DataSourceMetadata dataSourceMetadata = new Gson().fromJson(getResponseString, DataSourceMetadata.class); - Assert.assertEquals("https://localhost:9090", - dataSourceMetadata.getProperties().get("prometheus.uri")); + Assert.assertEquals( + "https://localhost:9090", dataSourceMetadata.getProperties().get("prometheus.uri")); } - @SneakyThrows @Test public void updateDataSourceAPITest() { - //create datasource + // create datasource DataSourceMetadata createDSM = - new DataSourceMetadata("update_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://localhost:9090")); + new DataSourceMetadata( + "update_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); client().performRequest(createRequest); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. 
Thread.sleep(2000); - //update datasource + // update datasource DataSourceMetadata updateDSM = - new DataSourceMetadata("update_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://randomtest.com:9090")); + new DataSourceMetadata( + "update_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://randomtest.com:9090")); Request updateRequest = getUpdateDataSourceRequest(updateDSM); Response updateResponse = client().performRequest(updateRequest); Assert.assertEquals(200, updateResponse.getStatusLine().getStatusCode()); String updateResponseString = getResponseBody(updateResponse); Assert.assertEquals("Updated DataSource with name update_prometheus", updateResponseString); - //Datasource is not immediately updated. so introducing a sleep of 2s. + // Datasource is not immediately updated. so introducing a sleep of 2s. Thread.sleep(2000); - //update datasource with invalid URI + // update datasource with invalid URI updateDSM = - new DataSourceMetadata("update_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://randomtest:9090")); - final Request illFormedUpdateRequest - = getUpdateDataSourceRequest(updateDSM); - ResponseException updateResponseException - = Assert.assertThrows(ResponseException.class, () -> client().performRequest(illFormedUpdateRequest)); + new DataSourceMetadata( + "update_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://randomtest:9090")); + final Request illFormedUpdateRequest = getUpdateDataSourceRequest(updateDSM); + ResponseException updateResponseException = + Assert.assertThrows( + ResponseException.class, () -> client().performRequest(illFormedUpdateRequest)); Assert.assertEquals(400, updateResponseException.getResponse().getStatusLine().getStatusCode()); updateResponseString = getResponseBody(updateResponseException.getResponse()); JsonObject errorMessage = new Gson().fromJson(updateResponseString, JsonObject.class); - Assert.assertEquals("Invalid hostname in the uri: https://randomtest:9090", + Assert.assertEquals( + "Invalid hostname in the uri: https://randomtest:9090", errorMessage.get("error").getAsJsonObject().get("details").getAsString()); Thread.sleep(2000); - //get datasource to validate the modification. - //get datasource + // get datasource to validate the modification. 
+ // get datasource Request getRequest = getFetchDataSourceRequest("update_prometheus"); Response getResponse = client().performRequest(getRequest); Assert.assertEquals(200, getResponse.getStatusLine().getStatusCode()); String getResponseString = getResponseBody(getResponse); DataSourceMetadata dataSourceMetadata = new Gson().fromJson(getResponseString, DataSourceMetadata.class); - Assert.assertEquals("https://randomtest.com:9090", - dataSourceMetadata.getProperties().get("prometheus.uri")); + Assert.assertEquals( + "https://randomtest.com:9090", dataSourceMetadata.getProperties().get("prometheus.uri")); } - @SneakyThrows @Test public void deleteDataSourceTest() { - //create datasource for deletion + // create datasource for deletion DataSourceMetadata createDSM = - new DataSourceMetadata("delete_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://localhost:9090")); + new DataSourceMetadata( + "delete_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); client().performRequest(createRequest); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. Thread.sleep(2000); - //delete datasource + // delete datasource Request deleteRequest = getDeleteDataSourceRequest("delete_prometheus"); Response deleteResponse = client().performRequest(deleteRequest); Assert.assertEquals(204, deleteResponse.getStatusLine().getStatusCode()); - //Datasource is not immediately deleted. so introducing a sleep of 2s. + // Datasource is not immediately deleted. so introducing a sleep of 2s. Thread.sleep(2000); - //get datasources to verify the deletion + // get datasources to verify the deletion final Request prometheusGetRequest = getFetchDataSourceRequest("delete_prometheus"); - ResponseException prometheusGetResponseException - = Assert.assertThrows(ResponseException.class, () -> client().performRequest(prometheusGetRequest)); - Assert.assertEquals( 400, prometheusGetResponseException.getResponse().getStatusLine().getStatusCode()); - String prometheusGetResponseString = getResponseBody(prometheusGetResponseException.getResponse()); + ResponseException prometheusGetResponseException = + Assert.assertThrows( + ResponseException.class, () -> client().performRequest(prometheusGetRequest)); + Assert.assertEquals( + 400, prometheusGetResponseException.getResponse().getStatusLine().getStatusCode()); + String prometheusGetResponseString = + getResponseBody(prometheusGetResponseException.getResponse()); JsonObject errorMessage = new Gson().fromJson(prometheusGetResponseString, JsonObject.class); - Assert.assertEquals("DataSource with name: delete_prometheus doesn't exist.", + Assert.assertEquals( + "DataSource with name: delete_prometheus doesn't exist.", errorMessage.get("error").getAsJsonObject().get("details").getAsString()); - } @SneakyThrows @Test public void getAllDataSourceTest() { -//create datasource for deletion + // create datasource for deletion DataSourceMetadata createDSM = - new DataSourceMetadata("get_all_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://localhost:9090")); + new DataSourceMetadata( + "get_all_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://localhost:9090")); Request createRequest = 
getCreateDataSourceRequest(createDSM); client().performRequest(createRequest); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. Thread.sleep(2000); Request getRequest = getFetchDataSourceRequest(null); @@ -183,5 +208,4 @@ public void getAllDataSourceTest() { Assert.assertTrue( dataSourceMetadataList.stream().anyMatch(ds -> ds.getName().equals("get_all_prometheus"))); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java b/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java index 8c4959707a..a54d89dabe 100644 --- a/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java @@ -19,25 +19,33 @@ public class DatasourceClusterSettingsIT extends PPLIntegTestCase { private static final Logger LOG = LogManager.getLogger(); + @Test public void testGetDatasourceClusterSettings() throws IOException { JSONObject clusterSettings = getAllClusterSettings(); - assertThat(clusterSettings.query("/defaults/plugins.query.datasources.encryption.masterkey"), + assertThat( + clusterSettings.query("/defaults/plugins.query.datasources.encryption.masterkey"), equalTo(null)); } - @Test public void testPutDatasourceClusterSettings() throws IOException { final ResponseException exception = - expectThrows(ResponseException.class, () -> updateClusterSettings(new ClusterSetting(PERSISTENT, - "plugins.query.datasources.encryption.masterkey", - "masterkey"))); + expectThrows( + ResponseException.class, + () -> + updateClusterSettings( + new ClusterSetting( + PERSISTENT, + "plugins.query.datasources.encryption.masterkey", + "masterkey"))); JSONObject resp = new JSONObject(TestUtils.getResponseBody(exception.getResponse())); assertThat(resp.getInt("status"), equalTo(400)); - assertThat(resp.query("/error/root_cause/0/reason"), - equalTo("final persistent setting [plugins.query.datasources.encryption.masterkey], not updateable")); + assertThat( + resp.query("/error/root_cause/0/reason"), + equalTo( + "final persistent setting [plugins.query.datasources.encryption.masterkey], not" + + " updateable")); assertThat(resp.query("/error/type"), equalTo("settings_exception")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java b/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java index 959621dbad..325c81107f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java @@ -66,12 +66,14 @@ public void init() { public static void initConnection() { var driverFile = System.getProperty("jdbcFile"); if (driverFile != null) { - URLClassLoader loader = new URLClassLoader( - new URL[]{new File(driverFile).toURI().toURL()}, - ClassLoader.getSystemClassLoader() - ); - Driver driver = (Driver) Class.forName("org.opensearch.jdbc.Driver", true, loader) - .getDeclaredConstructor().newInstance(); + URLClassLoader loader = + new URLClassLoader( + new URL[] {new File(driverFile).toURI().toURL()}, ClassLoader.getSystemClassLoader()); + Driver driver = + (Driver) + Class.forName("org.opensearch.jdbc.Driver", true, loader) + .getDeclaredConstructor() + .newInstance(); connection = driver.connect(getConnectionString(), null); } else { connection = DriverManager.getConnection(getConnectionString()); @@ -93,7 +95,8 @@ public 
static void closeConnection() { @SneakyThrows public void check_driver_version() { var version = System.getProperty("jdbcDriverVersion"); - Assume.assumeTrue("Parameter `jdbcDriverVersion` is not given, test platform uses default driver version", + Assume.assumeTrue( + "Parameter `jdbcDriverVersion` is not given, test platform uses default driver version", version != null); assertEquals(version, connection.getMetaData().getDriverVersion()); } @@ -103,11 +106,12 @@ public void check_driver_version() { public void select_all_no_cursor() { Statement stmt = connection.createStatement(); - for (var table : List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { + for (var table : + List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { var query = String.format("SELECT * FROM %s", table); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + while (rs.next()) rows++; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -119,11 +123,13 @@ public void select_all_no_cursor() { public void select_count_all_no_cursor() { Statement stmt = connection.createStatement(); - for (var table : List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { + for (var table : + List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { var query = String.format("SELECT COUNT(*) FROM %s", table); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -140,7 +146,8 @@ public void select_all_small_table_big_cursor() { stmt.setFetchSize(200); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -157,7 +164,8 @@ public void select_all_small_table_small_cursor() { stmt.setFetchSize(3); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -174,7 +182,8 @@ public void select_all_big_table_small_cursor() { stmt.setFetchSize(10); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -191,16 +200,15 @@ public void select_all_big_table_big_cursor() { stmt.setFetchSize(500); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); } } - /** - * Use OpenSearch cluster initialized by OpenSearch Gradle task. - */ + /** Use OpenSearch cluster initialized by OpenSearch Gradle task. 
*/ private static String getConnectionString() { // string like "[::1]:46751,127.0.0.1:34403" var clusterUrls = System.getProperty("tests.rest.cluster").split(","); @@ -211,7 +219,8 @@ private static String getConnectionString() { protected JSONObject executeRestQuery(String query, @Nullable Integer fetch_size) { Request request = new Request("POST", QUERY_API_ENDPOINT); if (fetch_size != null) { - request.setJsonEntity(String.format("{ \"query\": \"%s\", \"fetch_size\": %d }", query, fetch_size)); + request.setJsonEntity( + String.format("{ \"query\": \"%s\", \"fetch_size\": %d }", query, fetch_size)); } else { request.setJsonEntity(String.format("{ \"query\": \"%s\" }", query)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java index e064300e4f..37398220ff 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -24,10 +23,9 @@ protected void init() throws Exception { @Test public void noGroupKeySingleFuncOverAggWithoutAliasShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT abs(MAX(age)) " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format("SELECT abs(MAX(age)) " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("abs(MAX(age))", null, "long")); verifyDataRows(response, rows(40)); @@ -35,10 +33,10 @@ public void noGroupKeySingleFuncOverAggWithoutAliasShouldPass() { @Test public void noGroupKeyMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT MAX(age) + MIN(age) as addValue " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT MAX(age) + MIN(age) as addValue " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("MAX(age) + MIN(age)", "addValue", "long")); verifyDataRows(response, rows(60)); @@ -46,10 +44,9 @@ public void noGroupKeyMaxAddMinShouldPass() { @Test public void noGroupKeyMaxAddLiteralShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT MAX(age) + 1 as `add` " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format("SELECT MAX(age) + 1 as `add` " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("MAX(age) + 1", "add", "long")); verifyDataRows(response, rows(41)); @@ -57,10 +54,9 @@ public void noGroupKeyMaxAddLiteralShouldPass() { @Test public void noGroupKeyAvgOnIntegerShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT AVG(age) as `avg` " + - "FROM %s", - Index.BANK.getName())); + JSONObject response = + executeJdbcRequest( + String.format("SELECT AVG(age) as `avg` " + "FROM %s", Index.BANK.getName())); verifySchema(response, schema("AVG(age)", "avg", "double")); verifyDataRows(response, rows(34D)); @@ -68,58 +64,49 @@ public void noGroupKeyAvgOnIntegerShouldPass() { @Test public void hasGroupKeyAvgOnIntegerShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, AVG(age) as `avg` " + - "FROM %s " + - "GROUP BY gender", - Index.BANK.getName())); + JSONObject response = + executeJdbcRequest( + 
String.format( + "SELECT gender, AVG(age) as `avg` " + "FROM %s " + "GROUP BY gender", + Index.BANK.getName())); - verifySchema(response, - schema("gender", null, "text"), - schema("AVG(age)", "avg", "double")); - verifyDataRows(response, - rows("m", 34.25), - rows("f", 33.666666666666664d)); + verifySchema(response, schema("gender", null, "text"), schema("AVG(age)", "avg", "double")); + verifyDataRows(response, rows("m", 34.25), rows("f", 33.666666666666664d)); } @Test public void hasGroupKeyMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, MAX(age) + MIN(age) as addValue " + - "FROM %s " + - "GROUP BY gender", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, MAX(age) + MIN(age) as addValue " + "FROM %s " + "GROUP BY gender", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("MAX(age) + MIN(age)", "addValue", "long")); - verifyDataRows(response, - rows("m", 60), - rows("f", 60)); + verifyDataRows(response, rows("m", 60), rows("f", 60)); } @Test public void hasGroupKeyMaxAddLiteralShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, MAX(age) + 1 as `add` " + - "FROM %s " + - "GROUP BY gender", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, MAX(age) + 1 as `add` " + "FROM %s " + "GROUP BY gender", + Index.ACCOUNT.getName())); - verifySchema(response, - schema("gender", null, "text"), - schema("MAX(age) + 1", "add", "long")); - verifyDataRows(response, - rows("m", 41), - rows("f", 41)); + verifySchema(response, schema("gender", null, "text"), schema("MAX(age) + 1", "add", "long")); + verifyDataRows(response, rows("m", 41), rows("f", 41)); } @Test public void noGroupKeyLogMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT Log(MAX(age) + MIN(age)) as `log` " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT Log(MAX(age) + MIN(age)) as `log` " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("Log(MAX(age) + MIN(age))", "log", "double")); verifyDataRows(response, rows(4.0943445622221d)); @@ -127,117 +114,124 @@ public void noGroupKeyLogMaxAddMinShouldPass() { @Test public void hasGroupKeyLogMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, Log(MAX(age) + MIN(age)) as logValue " + - "FROM %s " + - "GROUP BY gender", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, Log(MAX(age) + MIN(age)) as logValue " + + "FROM %s " + + "GROUP BY gender", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("Log(MAX(age) + MIN(age))", "logValue", "double")); - verifyDataRows(response, - rows("m", 4.0943445622221d), - rows("f", 4.0943445622221d)); + verifyDataRows(response, rows("m", 4.0943445622221d), rows("f", 4.0943445622221d)); } @Test public void AddLiteralOnGroupKeyShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, age+10, max(balance) as `max` " + - "FROM %s " + - "WHERE gender = 'm' and age < 22 " + - "GROUP BY gender, age " + - "ORDER BY age", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, 
age+10, max(balance) as `max` " + + "FROM %s " + + "WHERE gender = 'm' and age < 22 " + + "GROUP BY gender, age " + + "ORDER BY age", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("age+10", null, "long"), schema("max(balance)", "max", "long")); - verifyDataRows(response, - rows("m", 30, 49568), - rows("m", 31, 49433)); + verifyDataRows(response, rows("m", 30, 49568), rows("m", 31, 49433)); } @Test public void logWithAddLiteralOnGroupKeyShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, Log(age+10) as logAge, max(balance) as max " + - "FROM %s " + - "WHERE gender = 'm' and age < 22 " + - "GROUP BY gender, age " + - "ORDER BY age", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, Log(age+10) as logAge, max(balance) as max " + + "FROM %s " + + "WHERE gender = 'm' and age < 22 " + + "GROUP BY gender, age " + + "ORDER BY age", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("Log(age+10)", "logAge", "double"), schema("max(balance)", "max", "long")); - verifyDataRows(response, - rows("m", 3.4011973816621555d, 49568), - rows("m", 3.4339872044851463d, 49433)); + verifyDataRows( + response, rows("m", 3.4011973816621555d, 49568), rows("m", 3.4339872044851463d, 49433)); } @Test public void logWithAddLiteralOnGroupKeyAndMaxSubtractLiteralShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, Log(age+10) as logAge, max(balance) - 100 as max " + - "FROM %s " + - "WHERE gender = 'm' and age < 22 " + - "GROUP BY gender, age " + - "ORDER BY age", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, Log(age+10) as logAge, max(balance) - 100 as max " + + "FROM %s " + + "WHERE gender = 'm' and age < 22 " + + "GROUP BY gender, age " + + "ORDER BY age", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("Log(age+10)", "logAge", "double"), schema("max(balance) - 100", "max", "long")); - verifyDataRows(response, - rows("m", 3.4011973816621555d, 49468), - rows("m", 3.4339872044851463d, 49333)); + verifyDataRows( + response, rows("m", 3.4011973816621555d, 49468), rows("m", 3.4339872044851463d, 49333)); } - /** - * The date is in JDBC format. - */ + /** The date is in JDBC format. 
*/ @Test public void groupByDateShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT birthdate, count(*) as `count` " + - "FROM %s " + - "WHERE age < 30 " + - "GROUP BY birthdate ", - Index.BANK.getName())); - - verifySchema(response, - schema("birthdate", null, "timestamp"), - schema("count(*)", "count", "integer")); - verifyDataRows(response, - rows("2018-06-23 00:00:00", 1)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT birthdate, count(*) as `count` " + + "FROM %s " + + "WHERE age < 30 " + + "GROUP BY birthdate ", + Index.BANK.getName())); + + verifySchema( + response, schema("birthdate", null, "timestamp"), schema("count(*)", "count", "integer")); + verifyDataRows(response, rows("2018-06-23 00:00:00", 1)); } @Test public void groupByDateWithAliasShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT birthdate as birth, count(*) as `count` " + - "FROM %s " + - "WHERE age < 30 " + - "GROUP BY birthdate ", - Index.BANK.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT birthdate as birth, count(*) as `count` " + + "FROM %s " + + "WHERE age < 30 " + + "GROUP BY birthdate ", + Index.BANK.getName())); + + verifySchema( + response, schema("birthdate", "birth", "timestamp"), schema("count(*)", "count", "integer")); - verifyDataRows(response, - rows("2018-06-23 00:00:00", 1)); + verifyDataRows(response, rows("2018-06-23 00:00:00", 1)); } @Test public void aggregateCastStatementShouldNotReturnZero() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT SUM(CAST(male AS INT)) AS male_sum FROM %s", - Index.BANK.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT SUM(CAST(male AS INT)) AS male_sum FROM %s", Index.BANK.getName())); verifySchema(response, schema("SUM(CAST(male AS INT))", "male_sum", "integer")); verifyDataRows(response, rows(4)); @@ -245,8 +239,8 @@ public void aggregateCastStatementShouldNotReturnZero() { @Test public void groupByConstantShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "select 1 from %s GROUP BY 1", Index.BANK.getName())); + JSONObject response = + executeJdbcRequest(String.format("select 1 from %s GROUP BY 1", Index.BANK.getName())); verifySchema(response, schema("1", null, "integer")); verifyDataRows(response, rows(1)); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java index 9687e43823..490e9eb510 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -60,8 +59,9 @@ public void countTest() throws IOException { @Ignore("The distinct is not supported in new engine") public void countDistinctTest() { - JSONObject response = executeJdbcRequest( - String.format("SELECT COUNT(distinct gender) FROM %s", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format("SELECT COUNT(distinct gender) FROM %s", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(DISTINCT gender)", null, "integer")); verifyDataRows(response, rows(2)); @@ -71,8 +71,9 @@ public void countDistinctTest() { public void countWithDocsHintTest() throws Exception { JSONObject result = - 
executeQuery(String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format( + "SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", TEST_INDEX_ACCOUNT)); JSONArray hits = (JSONArray) result.query("/hits/hits"); Assert.assertThat(hits.length(), equalTo(10)); } @@ -83,8 +84,8 @@ public void sumTest() throws IOException { JSONObject result = executeQuery(String.format("SELECT SUM(balance) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert.assertThat(getDoubleAggregationValue(result, "SUM(balance)", "value"), - equalTo(25714837.0)); + Assert.assertThat( + getDoubleAggregationValue(result, "SUM(balance)", "value"), equalTo(25714837.0)); } @Test @@ -127,23 +128,28 @@ public void statsTest() throws IOException { @Test public void extendedStatsTest() throws IOException { - JSONObject result = executeQuery(String.format("SELECT EXTENDED_STATS(age) FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery(String.format("SELECT EXTENDED_STATS(age) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert - .assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "min"), equalTo(20.0)); - Assert - .assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "max"), equalTo(40.0)); - Assert.assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "avg"), - equalTo(30.171)); - Assert.assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum"), - equalTo(30171.0)); - Assert.assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum_of_squares"), + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "min"), equalTo(20.0)); + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "max"), equalTo(40.0)); + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "avg"), equalTo(30.171)); + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum"), equalTo(30171.0)); + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum_of_squares"), equalTo(946393.0)); - Assert.assertEquals(6.008640362012022, - getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "std_deviation"), 0.0001); - Assert.assertEquals(36.10375899999996, - getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "variance"), 0.0001); + Assert.assertEquals( + 6.008640362012022, + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "std_deviation"), + 0.0001); + Assert.assertEquals( + 36.10375899999996, + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "variance"), + 0.0001); } @Test @@ -152,72 +158,73 @@ public void percentileTest() throws IOException { JSONObject result = executeQuery(String.format("SELECT PERCENTILES(age) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert - .assertEquals(20.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "1.0"), - 0.001); - Assert - .assertEquals(21.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "5.0"), - 0.001); - Assert - .assertEquals(25.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "25.0"), - 0.001); - // All percentiles are approximations calculated by t-digest, however, P50 has the widest distribution (not sure why) - Assert - .assertEquals(30.5, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "50.0"), - 0.6); - 
Assert - .assertEquals(35.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "75.0"), - 0.6); - Assert - .assertEquals(39.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "95.0"), - 0.6); - Assert - .assertEquals(40.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "99.0"), - 0.6); + Assert.assertEquals( + 20.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "1.0"), 0.001); + Assert.assertEquals( + 21.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "5.0"), 0.001); + Assert.assertEquals( + 25.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "25.0"), 0.001); + // All percentiles are approximations calculated by t-digest, however, P50 has the widest + // distribution (not sure why) + Assert.assertEquals( + 30.5, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "50.0"), 0.6); + Assert.assertEquals( + 35.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "75.0"), 0.6); + Assert.assertEquals( + 39.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "95.0"), 0.6); + Assert.assertEquals( + 40.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "99.0"), 0.6); } @Test public void percentileTestSpecific() throws IOException { - JSONObject result = executeQuery(String.format("SELECT PERCENTILES(age,25.0,75.0) FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format("SELECT PERCENTILES(age,25.0,75.0) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert.assertEquals(25.0, - getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "25.0"), 0.6); - Assert.assertEquals(35.0, - getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "75.0"), 0.6); + Assert.assertEquals( + 25.0, + getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "25.0"), + 0.6); + Assert.assertEquals( + 35.0, + getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "75.0"), + 0.6); } @Test public void aliasTest() throws IOException { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) AS mycount FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery(String.format("SELECT COUNT(*) AS mycount FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); Assert.assertThat(getIntAggregationValue(result, "mycount", "value"), equalTo(1000)); } @Test public void groupByTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT)); assertResultForGroupByTest(result); } @Test public void groupByUsingTableAliasTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s a GROUP BY a.gender", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format("SELECT COUNT(*) FROM %s a GROUP BY a.gender", TEST_INDEX_ACCOUNT)); assertResultForGroupByTest(result); } @Test public void groupByUsingTableNamePrefixTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT COUNT(*) FROM %s GROUP BY opensearch-sql_test_index_account.gender", - TEST_INDEX_ACCOUNT - )); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY opensearch-sql_test_index_account.gender", + TEST_INDEX_ACCOUNT)); 
assertResultForGroupByTest(result); } @@ -241,31 +248,33 @@ private void assertResultForGroupByTest(JSONObject result) { @Test public void groupByHavingTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT gender " + - "FROM %s " + - "GROUP BY gender " + - "HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT gender FROM %s GROUP BY gender HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT)); assertResultForGroupByHavingTest(result); } @Test public void groupByHavingUsingTableAliasTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT a.gender " + - "FROM %s a " + - "GROUP BY a.gender " + - "HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT a.gender FROM %s a GROUP BY a.gender HAVING COUNT(*) > 0", + TEST_INDEX_ACCOUNT)); assertResultForGroupByHavingTest(result); } @Test public void groupByHavingUsingTableNamePrefixTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT opensearch-sql_test_index_account.gender " + - "FROM %s " + - "GROUP BY opensearch-sql_test_index_account.gender " + - "HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT opensearch-sql_test_index_account.gender " + + "FROM %s " + + "GROUP BY opensearch-sql_test_index_account.gender " + + "HAVING COUNT(*) > 0", + TEST_INDEX_ACCOUNT)); assertResultForGroupByHavingTest(result); } @@ -287,15 +296,17 @@ private void assertResultForGroupByHavingTest(JSONObject result) { Assert.assertThat(gender.query(femaleBucketPrefix + "/count_0/value"), equalTo(493)); } - @Ignore //todo VerificationException: table alias or field name missing + @Ignore // todo VerificationException: table alias or field name missing @Test public void groupBySubqueryTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT COUNT(*) FROM %s " + - "WHERE firstname IN (SELECT firstname FROM %s) " + - "GROUP BY gender", - TEST_INDEX_ACCOUNT, TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s " + + "WHERE firstname IN (SELECT firstname FROM %s) " + + "GROUP BY gender", + TEST_INDEX_ACCOUNT, TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -316,9 +327,12 @@ public void groupBySubqueryTest() throws Exception { @Test public void postFilterTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT /*! POST_FILTER({\\\"term\\\":" + - "{\\\"gender\\\":\\\"m\\\"}}) */ COUNT(*) FROM %s GROUP BY gender", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT /*! 
POST_FILTER({\\\"term\\\":" + + "{\\\"gender\\\":\\\"m\\\"}}) */ COUNT(*) FROM %s GROUP BY gender", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(507)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -339,9 +353,12 @@ public void postFilterTest() throws Exception { @Test public void multipleGroupByTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender," + - " terms('field'='age','size'=200,'alias'='age')", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY gender," + + " terms('field'='age','size'=200,'alias'='age')", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -364,9 +381,11 @@ public void multipleGroupByTest() throws Exception { final Set actualAgesM = new HashSet<>(expectedAges.size()); final Set actualAgesF = new HashSet<>(expectedAges.size()); - mAgeBuckets.iterator() + mAgeBuckets + .iterator() .forEachRemaining(json -> actualAgesM.add(((JSONObject) json).getInt("key"))); - fAgeBuckets.iterator() + fAgeBuckets + .iterator() .forEachRemaining(json -> actualAgesF.add(((JSONObject) json).getInt("key"))); Assert.assertThat(actualAgesM, equalTo(expectedAges)); @@ -376,9 +395,12 @@ public void multipleGroupByTest() throws Exception { @Test public void multipleGroupBysWithSize() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender," + - " terms('alias'='ageAgg','field'='age','size'=3)", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY gender," + + " terms('alias'='ageAgg','field'='age','size'=3)", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -393,9 +415,12 @@ public void multipleGroupBysWithSize() throws Exception { @Test public void termsWithSize() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY terms" + - "('alias'='ageAgg','field'='age','size'=3)", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY terms" + + "('alias'='ageAgg','field'='age','size'=3)", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "ageAgg"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(3)); @@ -404,9 +429,12 @@ public void termsWithSize() throws Exception { @Test public void termsWithMissing() throws Exception { - JSONObject result = executeQuery(String.format("SELECT count(*) FROM %s GROUP BY terms" + - "('alias'='nick','field'='nickname','missing'='no_nickname')", - TEST_INDEX_GAME_OF_THRONES)); + JSONObject result = + executeQuery( + String.format( + "SELECT count(*) FROM %s GROUP BY terms" + + "('alias'='nick','field'='nickname','missing'='no_nickname')", + TEST_INDEX_GAME_OF_THRONES)); JSONObject nick = getAggregation(result, "nick"); Optional noNicknameBucket = Optional.empty(); @@ -427,9 +455,12 @@ public void termsWithOrder() throws Exception { final String dog1 = "snoopy"; final String dog2 = "rex"; - JSONObject result = 
executeQuery(String.format("SELECT count(*) FROM %s GROUP BY terms" + - "('field'='dog_name', 'alias'='dog_name', 'order'='desc')", - TEST_INDEX_DOG)); + JSONObject result = + executeQuery( + String.format( + "SELECT count(*) FROM %s GROUP BY terms" + + "('field'='dog_name', 'alias'='dog_name', 'order'='desc')", + TEST_INDEX_DOG)); JSONObject dogName = getAggregation(result, "dog_name"); String firstDog = (String) (dogName.optQuery("/buckets/0/key")); @@ -437,8 +468,12 @@ public void termsWithOrder() throws Exception { Assert.assertThat(firstDog, equalTo(dog1)); Assert.assertThat(secondDog, equalTo(dog2)); - result = executeQuery(String.format("SELECT count(*) FROM %s GROUP BY terms" + - "('field'='dog_name', 'alias'='dog_name', 'order'='asc')", TEST_INDEX_DOG)); + result = + executeQuery( + String.format( + "SELECT count(*) FROM %s GROUP BY terms" + + "('field'='dog_name', 'alias'='dog_name', 'order'='asc')", + TEST_INDEX_DOG)); dogName = getAggregation(result, "dog_name"); @@ -450,92 +485,96 @@ public void termsWithOrder() throws Exception { @Test public void orderByAscTest() { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) FROM %s " + - "GROUP BY gender ORDER BY COUNT(*)", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) FROM %s " + "GROUP BY gender ORDER BY COUNT(*)", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", null, "integer")); - verifyDataRows(response, - rows(493), - rows(507)); + verifyDataRows(response, rows(493), rows(507)); } @Test public void orderByAliasAscTest() { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) as count FROM %s " + - "GROUP BY gender ORDER BY count", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) as count FROM %s " + "GROUP BY gender ORDER BY count", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows(493), - rows(507)); + verifyDataRowsInOrder(response, rows(493), rows(507)); } @Test public void orderByDescTest() throws IOException { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) FROM %s " + - "GROUP BY gender ORDER BY COUNT(*) DESC", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) FROM %s " + "GROUP BY gender ORDER BY COUNT(*) DESC", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", null, "integer")); - verifyDataRowsInOrder(response, - rows(507), - rows(493)); + verifyDataRowsInOrder(response, rows(507), rows(493)); } @Test public void orderByAliasDescTest() throws IOException { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) as count FROM %s " + - "GROUP BY gender ORDER BY count DESC", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) as count FROM %s " + "GROUP BY gender ORDER BY count DESC", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows(507), - rows(493)); + verifyDataRowsInOrder(response, rows(507), rows(493)); } @Test public void orderByGroupFieldWithAlias() throws IOException { // ORDER BY field name - JSONObject response = executeJdbcRequest(String.format("SELECT gender as g, COUNT(*) as count " - + "FROM %s GROUP BY gender ORDER BY gender", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + 
"SELECT gender as g, COUNT(*) as count " + + "FROM %s GROUP BY gender ORDER BY gender", + TEST_INDEX_ACCOUNT)); - verifySchema(response, - schema("gender", "g", "text"), - schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows("f", 493), - rows("m", 507)); + verifySchema(response, schema("gender", "g", "text"), schema("COUNT(*)", "count", "integer")); + verifyDataRowsInOrder(response, rows("f", 493), rows("m", 507)); // ORDER BY field alias - response = executeJdbcRequest(String.format("SELECT gender as g, COUNT(*) as count " - + "FROM %s GROUP BY gender ORDER BY g", TEST_INDEX_ACCOUNT)); - - verifySchema(response, - schema("gender", "g", "text"), - schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows("f", 493), - rows("m", 507)); + response = + executeJdbcRequest( + String.format( + "SELECT gender as g, COUNT(*) as count " + "FROM %s GROUP BY gender ORDER BY g", + TEST_INDEX_ACCOUNT)); + + verifySchema(response, schema("gender", "g", "text"), schema("COUNT(*)", "count", "integer")); + verifyDataRowsInOrder(response, rows("f", 493), rows("m", 507)); } @Test public void limitTest() throws IOException { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) FROM %s " + - "GROUP BY age ORDER BY COUNT(*) LIMIT 5", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) FROM %s " + "GROUP BY age ORDER BY COUNT(*) LIMIT 5", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", null, "integer")); - verifyDataRowsInOrder(response, - rows(35), - rows(39), - rows(39), - rows(42), - rows(42)); + verifyDataRowsInOrder(response, rows(35), rows(39), rows(39), rows(42), rows(42)); } @Test public void countGroupByRange() throws IOException { - JSONObject result = executeQuery(String.format("SELECT COUNT(age) FROM %s" + - " GROUP BY range(age, 20,25,30,35,40)", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(age) FROM %s" + " GROUP BY range(age, 20,25,30,35,40)", + TEST_INDEX_ACCOUNT)); JSONObject ageAgg = getAggregation(result, "range(age,20,25,30,35,40)"); JSONArray buckets = ageAgg.getJSONArray("buckets"); Assert.assertThat(buckets.length(), equalTo(4)); @@ -544,7 +583,8 @@ public void countGroupByRange() throws IOException { for (int i = 0; i < expectedResults.length; ++i) { - Assert.assertThat(buckets.query(String.format(Locale.ROOT, "/%d/COUNT(age)/value", i)), + Assert.assertThat( + buckets.query(String.format(Locale.ROOT, "/%d/COUNT(age)/value", i)), equalTo(expectedResults[i])); } } @@ -556,42 +596,58 @@ public void countGroupByRange() throws IOException { public void countGroupByDateTest() throws IOException { String result = - explainQuery(String.format("select insert_time from %s group by date_histogram" + - "('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','min_doc_count'=5) ", - TEST_INDEX_ONLINE)); - Assert.assertThat(result.replaceAll("\\s+", ""), - containsString("{\"date_histogram\":{\"field\":\"insert_time\",\"format\":\"yyyy-MM\"," + - "\"fixed_interval\":\"1h\",\"offset\":0,\"order\":{\"_key\":\"asc\"},\"keyed\":false," + - "\"min_doc_count\":5}")); + explainQuery( + String.format( + "select insert_time from %s group by" + + " date_histogram('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','min_doc_count'=5)" + + " ", + TEST_INDEX_ONLINE)); + Assert.assertThat( + result.replaceAll("\\s+", ""), + containsString( + 
"{\"date_histogram\":{\"field\":\"insert_time\",\"format\":\"yyyy-MM\"," + + "\"fixed_interval\":\"1h\",\"offset\":0,\"order\":{\"_key\":\"asc\"},\"keyed\":false," + + "\"min_doc_count\":5}")); } @Test public void countGroupByDateTestWithAlias() throws IOException { String result = - explainQuery(String.format("select insert_time from %s group by date_histogram" + - "('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','alias'='myAlias')", - TEST_INDEX_ONLINE)); - Assert.assertThat(result.replaceAll("\\s+", ""), - containsString("myAlias\":{\"date_histogram\":{\"field\":\"insert_time\"," + - "\"format\":\"yyyy-MM\",\"fixed_interval\":\"1h\"")); - } - -// /** -// * http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html -// */ -// @Test -// public void countDateRangeTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// String result = explainQuery(String.format("select online from %s group by date_range(field='insert_time'," + -// "'format'='yyyy-MM-dd' ,'2014-08-18','2014-08-17','now-8d','now-7d','now-6d','now')", -// TEST_INDEX_ONLINE)); -// // TODO: fix the query or fix the code for the query to work -// } + explainQuery( + String.format( + "select insert_time from %s group by date_histogram" + + "('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','alias'='myAlias')", + TEST_INDEX_ONLINE)); + Assert.assertThat( + result.replaceAll("\\s+", ""), + containsString( + "myAlias\":{\"date_histogram\":{\"field\":\"insert_time\"," + + "\"format\":\"yyyy-MM\",\"fixed_interval\":\"1h\"")); + } + + /** + * + * http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html + */ + @Test + @Ignore + public void countDateRangeTest() throws IOException { + String result = + explainQuery( + String.format( + "select online from %s group by date_range(" + + "field='insert_time', 'format'='yyyy-MM-dd' ,'2014-08-18','2014-08-17', " + + "'now-8d','now-7d','now-6d','now')", + TEST_INDEX_ONLINE)); + // TODO: fix the query or fix the code for the query to work + } @Test public void topHitTest() throws IOException { - String query = String - .format("select topHits('size'=3,age='desc') from %s group by gender", TEST_INDEX_ACCOUNT); + String query = + String.format( + "select topHits('size'=3,age='desc') from %s group by gender", TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -604,7 +660,8 @@ public void topHitTest() throws IOException { final String femaleBucketPrefix = String.format(Locale.ROOT, "/buckets/%d", femaleBucketId); Assert.assertThat(gender.query(maleBucketPrefix + "/key"), equalTo("m")); - Assert.assertThat(gender.query(maleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), + Assert.assertThat( + gender.query(maleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), equalTo(507)); Assert.assertThat( gender.query(maleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/relation"), @@ -614,9 +671,9 @@ public void topHitTest() throws IOException { .length(), equalTo(3)); Assert.assertThat(gender.query(femaleBucketPrefix + "/key"), equalTo("f")); - Assert - .assertThat(gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), - equalTo(493)); + Assert.assertThat( + gender.query(femaleBucketPrefix + 
"/topHits(size=3,age=desc)/hits/total/value"), + equalTo(493)); Assert.assertThat( gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/relation"), equalTo("eq")); @@ -630,7 +687,8 @@ public void topHitTest() throws IOException { public void topHitTest_WithInclude() throws IOException { String query = - String.format("select topHits('size'=3,age='desc','include'=age) from %s group by gender", + String.format( + "select topHits('size'=3,age='desc','include'=age) from %s group by gender", TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); @@ -647,28 +705,41 @@ public void topHitTest_WithInclude() throws IOException { Assert.assertThat( gender.query(maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/value"), equalTo(507)); - Assert.assertThat(gender - .query(maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), + Assert.assertThat( + gender.query( + maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query(maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")) + .length(), equalTo(3)); Assert.assertThat(gender.query(femaleBucketPrefix + "/key"), equalTo("f")); Assert.assertThat( gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/value"), equalTo(493)); - Assert.assertThat(gender - .query(femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), + Assert.assertThat( + gender.query( + femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query( + femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")) + .length(), equalTo(3)); for (int i = 0; i < 2; ++i) { for (int j = 0; j < 3; ++j) { - JSONObject source = (JSONObject) gender.query(String.format(Locale.ROOT, - "/buckets/%d/topHits(size=3,age=desc,include=age)/hits/hits/%d/_source", i, j)); + JSONObject source = + (JSONObject) + gender.query( + String.format( + Locale.ROOT, + "/buckets/%d/topHits(size=3,age=desc,include=age)/hits/hits/%d/_source", + i, + j)); Assert.assertThat(source.length(), equalTo(1)); Assert.assertTrue(source.has("age")); Assert.assertThat(source.getInt("age"), equalTo(40)); @@ -680,17 +751,24 @@ public void topHitTest_WithInclude() throws IOException { public void topHitTest_WithIncludeTwoFields() throws IOException { String query = - String.format("select topHits('size'=3,'include'='age,firstname',age='desc') from %s " + - "group by gender", TEST_INDEX_ACCOUNT); + String.format( + "select topHits('size'=3,'include'='age,firstname',age='desc') from %s " + + "group by gender", + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); for (int i = 0; i < 2; ++i) { for (int j = 0; j < 3; ++j) { - JSONObject source = (JSONObject) gender.query(String.format(Locale.ROOT, - "/buckets/%d/topHits(size=3,include=age,firstname,age=desc)/hits/hits/%d/_source", i, - j)); + JSONObject source = + (JSONObject) + gender.query( + String.format( + Locale.ROOT, + 
"/buckets/%d/topHits(size=3,include=age,firstname,age=desc)/hits/hits/%d/_source", + i, + j)); Assert.assertThat(source.length(), equalTo(2)); Assert.assertTrue(source.has("age")); Assert.assertThat(source.getInt("age"), equalTo(40)); @@ -704,8 +782,10 @@ public void topHitTest_WithIncludeTwoFields() throws IOException { @Test public void topHitTest_WithExclude() throws IOException { - String query = String.format("select topHits('size'=3,'exclude'='lastname',age='desc') from " + - "%s group by gender", TEST_INDEX_ACCOUNT); + String query = + String.format( + "select topHits('size'=3,'exclude'='lastname',age='desc') from " + "%s group by gender", + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -718,44 +798,61 @@ public void topHitTest_WithExclude() throws IOException { final String femaleBucketPrefix = String.format(Locale.ROOT, "/buckets/%d", femaleBucketId); Assert.assertThat(gender.query(maleBucketPrefix + "/key"), equalTo("m")); - Assert.assertThat(gender - .query(maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), + Assert.assertThat( + gender.query( + maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), equalTo(507)); - Assert.assertThat(gender - .query(maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), + Assert.assertThat( + gender.query( + maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query( + maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")) + .length(), equalTo(3)); Assert.assertThat(gender.query(femaleBucketPrefix + "/key"), equalTo("f")); - Assert.assertThat(gender - .query(femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), + Assert.assertThat( + gender.query( + femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), equalTo(493)); - Assert.assertThat(gender.query( - femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), + Assert.assertThat( + gender.query( + femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query( + femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")) + .length(), equalTo(3)); - final Set expectedFields = new HashSet<>(Arrays.asList( - "account_number", - "firstname", - "address", - "balance", - "gender", - "city", - "employer", - "state", - "age", - "email" - )); + final Set expectedFields = + new HashSet<>( + Arrays.asList( + "account_number", + "firstname", + "address", + "balance", + "gender", + "city", + "employer", + "state", + "age", + "email")); for (int i = 0; i < 2; ++i) { for (int j = 0; j < 3; ++j) { - JSONObject source = (JSONObject) gender.query(String.format(Locale.ROOT, - "/buckets/%d/topHits(size=3,exclude=lastname,age=desc)/hits/hits/%d/_source", i, j)); + JSONObject source = + (JSONObject) + gender.query( + String.format( + Locale.ROOT, + 
"/buckets/%d/topHits(size=3,exclude=lastname,age=desc)/hits/hits/%d/_source", + i, + j)); Assert.assertThat(source.length(), equalTo(expectedFields.size())); Assert.assertFalse(source.has("lastname")); Assert.assertThat(source.keySet().containsAll(expectedFields), equalTo(true)); @@ -763,254 +860,300 @@ public void topHitTest_WithExclude() throws IOException { } } - //region not migrated - - // script on metric aggregation tests. uncomment if your elastic has scripts enable (disabled by default) -// @Test -// public void sumWithScriptTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("SELECT SUM(script('','doc[\\'balance\\'].value + doc[\\'balance\\'].value')) as doubleSum FROM %s", TEST_INDEX)); -// Sum sum = result.get("doubleSum"); -// assertThat(sum.getValue(), equalTo(25714837.0*2)); -// } -// -// @Test -// public void sumWithImplicitScriptTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("SELECT SUM(balance + balance) as doubleSum FROM %s", TEST_INDEX)); -// Sum sum = result.get("doubleSum"); -// assertThat(sum.getValue(), equalTo(25714837.0*2)); -// } -// -// @Test -// public void sumWithScriptTestNoAlias() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("SELECT SUM(balance + balance) FROM %s", TEST_INDEX)); -// Sum sum = result.get("SUM(script=script(balance + balance,doc('balance').value + doc('balance').value))"); -// assertThat(sum.getValue(), equalTo(25714837.0*2)); -// } -// -// @Test -// public void scriptedMetricAggregation() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query ("select scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value); } " + -// "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value); }}'," + -// "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ sumThem += a.get(\\'ages\\');} }; return sumThem;') as wierdSum from " + TEST_INDEX + ""); -// ScriptedMetric metric = result.get("wierdSum"); -// Assert.assertEquals(136L,metric.aggregation()); -// } -// -// @Test -// public void scriptedMetricConcatWithStringParamAndReduceParamAggregation() throws SQLFeatureNotSupportedException, SqlParseException { -// String query = "select scripted_metric(\n" + -// " 'init_script' = '_agg[\"concat\"]=[] ',\n" + -// " 'map_script'='_agg.concat.add(doc[field].value)' ,\n" + -// " 'combine_script'='return _agg.concat.join(delim);',\t\t\t\t\n" + -// " 'reduce_script'='_aggs.removeAll(\"\"); return _aggs.join(delim)'," + -// "'@field' = 'name.firstname' , '@delim'=';',@reduce_delim =';' ) as all_characters \n" + -// "from "+TEST_INDEX+""; -// Aggregations result = query (query); -// ScriptedMetric metric = result.get("all_characters"); -// List names = Arrays.asList(metric.aggregation().toString().split(";")); -// -// -// Assert.assertEquals(4,names.size()); -// String[] expectedNames = new String[]{"brandon","daenerys","eddard","jaime"}; -// for(String name : expectedNames){ -// Assert.assertTrue("not contains:" + name,names.contains(name)); -// } -// } -// -// @Test -// public void scriptedMetricAggregationWithNumberParams() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query ("select scripted_metric('map_script'='if(doc[\\'balance\\'].value > 
49670){ if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value+x); } " + -// "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value+x); }}'," + -// "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ sumThem += a.get(\\'ages\\');} }; return sumThem;'" + -// ",'@x'=3) as wierdSum from " + TEST_INDEX + ""); -// ScriptedMetric metric = result.get("wierdSum"); -// Assert.assertEquals(148L,metric.aggregation()); -// } -// - -// @Test -// public void topHitTest_WithIncludeAndExclude() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("select topHits('size'=3,'exclude'='lastname','include'='firstname,lastname',age='desc') from %s group by gender ", TEST_INDEX_ACCOUNT)); -// List buckets = ((Terms) (result.asList().get(0))).getBuckets(); -// for (Terms.Bucket bucket : buckets) { -// SearchHits hits = ((InternalTopHits) bucket.getAggregations().asList().get(0)).getHits(); -// for (SearchHit hit : hits) { -// Set fields = hit.getSourceAsMap().keySet(); -// Assert.assertEquals(1, fields.size()); -// Assert.assertTrue(fields.contains("firstname")); -// } -// } -// } -// -// private Aggregations query(String query) throws SqlParseException, SQLFeatureNotSupportedException { -// SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); -// return ((SearchResponse)select.get()).getAggregations(); -// } -// -// private SqlElasticSearchRequestBuilder getSearchRequestBuilder(String query) throws SqlParseException, SQLFeatureNotSupportedException { -// SearchDao searchDao = MainTestSuite.getSearchDao(); -// return (SqlElasticSearchRequestBuilder) searchDao.explain(query).explain(); -// } -// -// @Test -// public void testFromSizeWithAggregations() throws Exception { -// final String query1 = String.format("SELECT /*! DOCS_WITH_AGGREGATION(0,1) */" + -// " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); -// SearchResponse response1 = (SearchResponse) getSearchRequestBuilder(query1).get(); -// -// Assert.assertEquals(1, response1.getHits().getHits().length); -// Terms gender1 = response1.getAggregations().get("gender"); -// Assert.assertEquals(2, gender1.getBuckets().size()); -// Object account1 = response1.getHits().getHits()[0].getSourceAsMap().get("account_number"); -// -// final String query2 = String.format("SELECT /*! DOCS_WITH_AGGREGATION(1,1) */" + -// " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); -// SearchResponse response2 = (SearchResponse) getSearchRequestBuilder(query2).get(); -// -// Assert.assertEquals(1, response2.getHits().getHits().length); -// Terms gender2 = response2.getAggregations().get("gender"); -// Assert.assertEquals(2, gender2.getBuckets().size()); -// Object account2 = response2.getHits().getHits()[0].getSourceAsMap().get("account_number"); -// -// Assert.assertEquals(response1.getHits().getTotalHits(), response2.getHits().getTotalHits()); -// Assert.assertNotEquals(account1, account2); -// } -// -// @Test -// public void testSubAggregations() throws Exception { -// Set expectedAges = new HashSet<>(ContiguousSet.create(Range.closed(20, 40), DiscreteDomain.integers())); -// final String query = String.format("SELECT /*! 
DOCS_WITH_AGGREGATION(10) */" + -// " * FROM %s GROUP BY (gender, terms('field'='age','size'=200,'alias'='age')), (state) LIMIT 200,200", TEST_INDEX_ACCOUNT); -// -// Map> buckets = new HashMap<>(); -// -// SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); -// SearchResponse response = (SearchResponse) select.get(); -// Aggregations result = response.getAggregations(); -// -// Terms gender = result.get("gender"); -// for(Terms.Bucket genderBucket : gender.getBuckets()) { -// String genderKey = genderBucket.getKey().toString(); -// buckets.put(genderKey, new HashSet()); -// Terms ageBuckets = (Terms) genderBucket.getAggregations().get("age"); -// for(Terms.Bucket ageBucket : ageBuckets.getBuckets()) { -// buckets.get(genderKey).add(Integer.parseInt(ageBucket.getKey().toString())); -// } -// } -// -// Assert.assertEquals(2, buckets.keySet().size()); -// Assert.assertEquals(expectedAges, buckets.get("m")); -// Assert.assertEquals(expectedAges, buckets.get("f")); -// -// Terms state = result.get("state.keyword"); -// for(Terms.Bucket stateBucket : state.getBuckets()) { -// if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { -// Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() == 22); -// } -// } -// -// Assert.assertEquals(response.getHits().getTotalHits(), 1000); -// Assert.assertEquals(response.getHits().getHits().length, 10); -// } -// -// @Test -// public void testSimpleSubAggregations() throws Exception { -// final String query = String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */ * FROM %s GROUP BY (gender), (state) ", TEST_INDEX_ACCOUNT); -// -// SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); -// SearchResponse response = (SearchResponse) select.get(); -// Aggregations result = response.getAggregations(); -// -// Terms gender = result.get("gender"); -// for(Terms.Bucket genderBucket : gender.getBuckets()) { -// String genderKey = genderBucket.getKey().toString(); -// Assert.assertTrue("Gender should be m or f", genderKey.equals("m") || genderKey.equals("f")); -// } -// -// Assert.assertEquals(2, gender.getBuckets().size()); -// -// Terms state = result.get("state.keyword"); -// for(Terms.Bucket stateBucket : state.getBuckets()) { -// if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { -// Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() == 22); -// } -// } -// -// Assert.assertEquals(response.getHits().getTotalHits(), 1000); -// Assert.assertEquals(response.getHits().getHits().length, 10); -// } -// -// @Test -// public void geoHashGrid() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query(String.format("SELECT COUNT(*) FROM %s/location GROUP BY geohash_grid(field='center',precision=5) ", TEST_INDEX_LOCATION)); -// InternalGeoHashGrid grid = result.get("geohash_grid(field=center,precision=5)"); -// Collection buckets = grid.getBuckets(); -// for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { -// Assert.assertTrue(bucket.getKeyAsString().equals("w2fsm") || bucket.getKeyAsString().equals("w0p6y") ); -// Assert.assertEquals(1,bucket.getDocCount()); -// } -// } -// -// @Test -// public void geoBounds() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query(String.format("SELECT * FROM %s/location GROUP BY geo_bounds(field='center',alias='bounds') ", TEST_INDEX_LOCATION)); -// InternalGeoBounds bounds = result.get("bounds"); -// 
Assert.assertEquals(0.5,bounds.bottomRight().getLat(),0.001); -// Assert.assertEquals(105.0,bounds.bottomRight().getLon(),0.001); -// Assert.assertEquals(5.0,bounds.topLeft().getLat(),0.001); -// Assert.assertEquals(100.5,bounds.topLeft().getLon(),0.001); -// } -// -// @Test -// public void groupByOnNestedFieldTest() throws Exception { -// Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)", TEST_INDEX_NESTED_TYPE)); -// InternalNested nested = result.get("message.info@NESTED"); -// Terms infos = nested.getAggregations().get("message.info"); -// Assert.assertEquals(3,infos.getBuckets().size()); -// for(Terms.Bucket bucket : infos.getBuckets()) { -// String key = bucket.getKey().toString(); -// long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); -// if(key.equalsIgnoreCase("a")) { -// Assert.assertEquals(2, count); -// } -// else if(key.equalsIgnoreCase("c")) { -// Assert.assertEquals(2, count); -// } -// else if(key.equalsIgnoreCase("b")) { -// Assert.assertEquals(1, count); -// } -// else { -// throw new Exception(String.format("Unexpected key. expected: a OR b OR c . found: %s", key)); -// } -// } -// } -// -// @Test -// public void groupByTestWithFilter() throws Exception { -// Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY filter(gender='m'),gender", TEST_INDEX_ACCOUNT)); -// InternalFilter filter = result.get("filter(gender = 'm')@FILTER"); -// Terms gender = filter.getAggregations().get("gender"); -// -// for(Terms.Bucket bucket : gender.getBuckets()) { -// String key = bucket.getKey().toString(); -// long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); -// if(key.equalsIgnoreCase("m")) { -// Assert.assertEquals(507, count); -// } -// else { -// throw new Exception(String.format("Unexpected key. expected: only m. found: %s", key)); -// } -// } -// } -// -// - //endregion not migrated + // region not migrated + + // script on metric aggregation tests. 
uncomment if your elastic has scripts enable (disabled by + // default) + // @Test + // public void sumWithScriptTest() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("SELECT + // SUM(script('','doc[\\'balance\\'].value + doc[\\'balance\\'].value')) as doubleSum FROM %s", + // TEST_INDEX)); + // Sum sum = result.get("doubleSum"); + // assertThat(sum.getValue(), equalTo(25714837.0*2)); + // } + // + // @Test + // public void sumWithImplicitScriptTest() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("SELECT SUM(balance + balance) as doubleSum + // FROM %s", TEST_INDEX)); + // Sum sum = result.get("doubleSum"); + // assertThat(sum.getValue(), equalTo(25714837.0*2)); + // } + // + // @Test + // public void sumWithScriptTestNoAlias() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("SELECT SUM(balance + balance) FROM %s", + // TEST_INDEX)); + // Sum sum = result.get("SUM(script=script(balance + balance,doc('balance').value + + // doc('balance').value))"); + // assertThat(sum.getValue(), equalTo(25714837.0*2)); + // } + // + // @Test + // public void scriptedMetricAggregation() throws SQLFeatureNotSupportedException, + // SqlParseException { + // Aggregations result = query ("select + // scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ + // if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value); } " + + // "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value); }}'," + + // "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ + // sumThem += a.get(\\'ages\\');} }; return sumThem;') as wierdSum from " + TEST_INDEX + ""); + // ScriptedMetric metric = result.get("wierdSum"); + // Assert.assertEquals(136L,metric.aggregation()); + // } + // + // @Test + // public void scriptedMetricConcatWithStringParamAndReduceParamAggregation() throws + // SQLFeatureNotSupportedException, SqlParseException { + // String query = "select scripted_metric(\n" + + // " 'init_script' = '_agg[\"concat\"]=[] ',\n" + + // " 'map_script'='_agg.concat.add(doc[field].value)' ,\n" + + // " 'combine_script'='return _agg.concat.join(delim);',\t\t\t\t\n" + + // " 'reduce_script'='_aggs.removeAll(\"\"); return _aggs.join(delim)'," + + // "'@field' = 'name.firstname' , '@delim'=';',@reduce_delim =';' ) as + // all_characters \n" + + // "from "+TEST_INDEX+""; + // Aggregations result = query (query); + // ScriptedMetric metric = result.get("all_characters"); + // List names = Arrays.asList(metric.aggregation().toString().split(";")); + // + // + // Assert.assertEquals(4,names.size()); + // String[] expectedNames = new String[]{"brandon","daenerys","eddard","jaime"}; + // for(String name : expectedNames){ + // Assert.assertTrue("not contains:" + name,names.contains(name)); + // } + // } + // + // @Test + // public void scriptedMetricAggregationWithNumberParams() throws + // SQLFeatureNotSupportedException, SqlParseException { + // Aggregations result = query ("select + // scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ + // if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value+x); } " + + // "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value+x); }}'," + // + + // "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ + // sumThem += 
a.get(\\'ages\\');} }; return sumThem;'" + + // ",'@x'=3) as wierdSum from " + TEST_INDEX + ""); + // ScriptedMetric metric = result.get("wierdSum"); + // Assert.assertEquals(148L,metric.aggregation()); + // } + // + + // @Test + // public void topHitTest_WithIncludeAndExclude() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("select + // topHits('size'=3,'exclude'='lastname','include'='firstname,lastname',age='desc') from %s group + // by gender ", TEST_INDEX_ACCOUNT)); + // List buckets = ((Terms) (result.asList().get(0))).getBuckets(); + // for (Terms.Bucket bucket : buckets) { + // SearchHits hits = ((InternalTopHits) + // bucket.getAggregations().asList().get(0)).getHits(); + // for (SearchHit hit : hits) { + // Set fields = hit.getSourceAsMap().keySet(); + // Assert.assertEquals(1, fields.size()); + // Assert.assertTrue(fields.contains("firstname")); + // } + // } + // } + // + // private Aggregations query(String query) throws SqlParseException, + // SQLFeatureNotSupportedException { + // SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); + // return ((SearchResponse)select.get()).getAggregations(); + // } + // + // private SqlElasticSearchRequestBuilder getSearchRequestBuilder(String query) throws + // SqlParseException, SQLFeatureNotSupportedException { + // SearchDao searchDao = MainTestSuite.getSearchDao(); + // return (SqlElasticSearchRequestBuilder) searchDao.explain(query).explain(); + // } + // + // @Test + // public void testFromSizeWithAggregations() throws Exception { + // final String query1 = String.format("SELECT /*! DOCS_WITH_AGGREGATION(0,1) */" + + // " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); + // SearchResponse response1 = (SearchResponse) getSearchRequestBuilder(query1).get(); + // + // Assert.assertEquals(1, response1.getHits().getHits().length); + // Terms gender1 = response1.getAggregations().get("gender"); + // Assert.assertEquals(2, gender1.getBuckets().size()); + // Object account1 = + // response1.getHits().getHits()[0].getSourceAsMap().get("account_number"); + // + // final String query2 = String.format("SELECT /*! DOCS_WITH_AGGREGATION(1,1) */" + + // " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); + // SearchResponse response2 = (SearchResponse) getSearchRequestBuilder(query2).get(); + // + // Assert.assertEquals(1, response2.getHits().getHits().length); + // Terms gender2 = response2.getAggregations().get("gender"); + // Assert.assertEquals(2, gender2.getBuckets().size()); + // Object account2 = + // response2.getHits().getHits()[0].getSourceAsMap().get("account_number"); + // + // Assert.assertEquals(response1.getHits().getTotalHits(), + // response2.getHits().getTotalHits()); + // Assert.assertNotEquals(account1, account2); + // } + // + // @Test + // public void testSubAggregations() throws Exception { + // Set expectedAges = new HashSet<>(ContiguousSet.create(Range.closed(20, 40), + // DiscreteDomain.integers())); + // final String query = String.format("SELECT /*! 
DOCS_WITH_AGGREGATION(10) */" + + // " * FROM %s GROUP BY (gender, terms('field'='age','size'=200,'alias'='age')), + // (state) LIMIT 200,200", TEST_INDEX_ACCOUNT); + // + // Map> buckets = new HashMap<>(); + // + // SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); + // SearchResponse response = (SearchResponse) select.get(); + // Aggregations result = response.getAggregations(); + // + // Terms gender = result.get("gender"); + // for(Terms.Bucket genderBucket : gender.getBuckets()) { + // String genderKey = genderBucket.getKey().toString(); + // buckets.put(genderKey, new HashSet()); + // Terms ageBuckets = (Terms) genderBucket.getAggregations().get("age"); + // for(Terms.Bucket ageBucket : ageBuckets.getBuckets()) { + // buckets.get(genderKey).add(Integer.parseInt(ageBucket.getKey().toString())); + // } + // } + // + // Assert.assertEquals(2, buckets.keySet().size()); + // Assert.assertEquals(expectedAges, buckets.get("m")); + // Assert.assertEquals(expectedAges, buckets.get("f")); + // + // Terms state = result.get("state.keyword"); + // for(Terms.Bucket stateBucket : state.getBuckets()) { + // if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { + // Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() + // == 22); + // } + // } + // + // Assert.assertEquals(response.getHits().getTotalHits(), 1000); + // Assert.assertEquals(response.getHits().getHits().length, 10); + // } + // + // @Test + // public void testSimpleSubAggregations() throws Exception { + // final String query = String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */ * FROM %s + // GROUP BY (gender), (state) ", TEST_INDEX_ACCOUNT); + // + // SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); + // SearchResponse response = (SearchResponse) select.get(); + // Aggregations result = response.getAggregations(); + // + // Terms gender = result.get("gender"); + // for(Terms.Bucket genderBucket : gender.getBuckets()) { + // String genderKey = genderBucket.getKey().toString(); + // Assert.assertTrue("Gender should be m or f", genderKey.equals("m") || + // genderKey.equals("f")); + // } + // + // Assert.assertEquals(2, gender.getBuckets().size()); + // + // Terms state = result.get("state.keyword"); + // for(Terms.Bucket stateBucket : state.getBuckets()) { + // if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { + // Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() + // == 22); + // } + // } + // + // Assert.assertEquals(response.getHits().getTotalHits(), 1000); + // Assert.assertEquals(response.getHits().getHits().length, 10); + // } + // + // @Test + // public void geoHashGrid() throws SQLFeatureNotSupportedException, SqlParseException { + // Aggregations result = query(String.format("SELECT COUNT(*) FROM %s/location GROUP BY + // geohash_grid(field='center',precision=5) ", TEST_INDEX_LOCATION)); + // InternalGeoHashGrid grid = result.get("geohash_grid(field=center,precision=5)"); + // Collection buckets = + // grid.getBuckets(); + // for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { + // Assert.assertTrue(bucket.getKeyAsString().equals("w2fsm") || + // bucket.getKeyAsString().equals("w0p6y") ); + // Assert.assertEquals(1,bucket.getDocCount()); + // } + // } + // + // @Test + // public void geoBounds() throws SQLFeatureNotSupportedException, SqlParseException { + // Aggregations result = query(String.format("SELECT * FROM %s/location GROUP BY + // geo_bounds(field='center',alias='bounds') ", 
TEST_INDEX_LOCATION)); + // InternalGeoBounds bounds = result.get("bounds"); + // Assert.assertEquals(0.5,bounds.bottomRight().getLat(),0.001); + // Assert.assertEquals(105.0,bounds.bottomRight().getLon(),0.001); + // Assert.assertEquals(5.0,bounds.topLeft().getLat(),0.001); + // Assert.assertEquals(100.5,bounds.topLeft().getLon(),0.001); + // } + // + // @Test + // public void groupByOnNestedFieldTest() throws Exception { + // Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY + // nested(message.info)", TEST_INDEX_NESTED_TYPE)); + // InternalNested nested = result.get("message.info@NESTED"); + // Terms infos = nested.getAggregations().get("message.info"); + // Assert.assertEquals(3,infos.getBuckets().size()); + // for(Terms.Bucket bucket : infos.getBuckets()) { + // String key = bucket.getKey().toString(); + // long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); + // if(key.equalsIgnoreCase("a")) { + // Assert.assertEquals(2, count); + // } + // else if(key.equalsIgnoreCase("c")) { + // Assert.assertEquals(2, count); + // } + // else if(key.equalsIgnoreCase("b")) { + // Assert.assertEquals(1, count); + // } + // else { + // throw new Exception(String.format("Unexpected key. expected: a OR b OR c . + // found: %s", key)); + // } + // } + // } + // + // @Test + // public void groupByTestWithFilter() throws Exception { + // Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY + // filter(gender='m'),gender", TEST_INDEX_ACCOUNT)); + // InternalFilter filter = result.get("filter(gender = 'm')@FILTER"); + // Terms gender = filter.getAggregations().get("gender"); + // + // for(Terms.Bucket bucket : gender.getBuckets()) { + // String key = bucket.getKey().toString(); + // long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); + // if(key.equalsIgnoreCase("m")) { + // Assert.assertEquals(507, count); + // } + // else { + // throw new Exception(String.format("Unexpected key. expected: only m. 
found: %s", + // key)); + // } + // } + // } + // + // + // endregion not migrated @Test public void groupByOnNestedFieldWithFilterTest() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 'a')", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + + "filter('myFilter',message.info = 'a')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1026,29 +1169,36 @@ public void groupByOnNestedFieldWithFilterTest() throws Exception { @Test public void minOnNestedField() throws Exception { - String query = String.format("SELECT min(nested(message.dayOfWeek)) as minDays FROM %s", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT min(nested(message.dayOfWeek)) as minDays FROM %s", TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.dayOfWeek@NESTED"); - Assert.assertEquals(1.0, ((BigDecimal) aggregation.query("/minDays/value")).doubleValue(), 0.0001); + Assert.assertEquals( + 1.0, ((BigDecimal) aggregation.query("/minDays/value")).doubleValue(), 0.0001); } @Test public void sumOnNestedField() throws Exception { - String query = String.format("SELECT sum(nested(message.dayOfWeek)) as sumDays FROM %s", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT sum(nested(message.dayOfWeek)) as sumDays FROM %s", TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.dayOfWeek@NESTED"); - Assert.assertEquals(19.0, ((BigDecimal) aggregation.query("/sumDays/value")).doubleValue(), 0.0001); + Assert.assertEquals( + 19.0, ((BigDecimal) aggregation.query("/sumDays/value")).doubleValue(), 0.0001); } @Test public void histogramOnNestedField() throws Exception { - String query = String.format("select count(*) from %s group by histogram" + - "('field'='message.dayOfWeek','nested'='message','interval'='2' , 'alias' = 'someAlias' )", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "select count(*) from %s group by" + + " histogram('field'='message.dayOfWeek','nested'='message','interval'='2' ," + + " 'alias' = 'someAlias' )", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message@NESTED"); @@ -1061,22 +1211,26 @@ public void histogramOnNestedField() throws Exception { JSONArray buckets = (JSONArray) aggregation.query("/someAlias/buckets"); Assert.assertThat(buckets.length(), equalTo(4)); - buckets.forEach(obj -> { - JSONObject bucket = (JSONObject) obj; - final double key = bucket.getDouble("key"); - Assert.assertTrue(expectedCountsByKey.containsKey(key)); - Assert.assertThat(bucket.getJSONObject("COUNT(*)").getInt("value"), - equalTo(expectedCountsByKey.get(key))); - }); + buckets.forEach( + obj -> { + JSONObject bucket = (JSONObject) obj; + final double key = bucket.getDouble("key"); + Assert.assertTrue(expectedCountsByKey.containsKey(key)); + Assert.assertThat( + bucket.getJSONObject("COUNT(*)").getInt("value"), + equalTo(expectedCountsByKey.get(key))); + }); } @Test public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedAndEmptyPath() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 
'a'),reverse_nested(someField,'')", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + + "filter('myFilter',message.info = 'a'),reverse_nested(someField,'')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1097,8 +1251,11 @@ public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedAndE public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedNoPath() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter" + - "('myFilter',message.info = 'a'),reverse_nested(someField)", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter" + + "('myFilter',message.info = 'a'),reverse_nested(someField)", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1119,9 +1276,12 @@ public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedNoPa public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHistogram() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 'a'),histogram('field'='myNum','reverse_nested'='','interval'='2', " + - "'alias' = 'someAlias' )", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter('myFilter',message.info" + + " = 'a'),histogram('field'='myNum','reverse_nested'='','interval'='2', 'alias' =" + + " 'someAlias' )", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1140,21 +1300,26 @@ public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHi expectedCountsByKey.put(2.0, 0); expectedCountsByKey.put(4.0, 1); - someAliasBuckets.forEach(obj -> { - JSONObject bucket = (JSONObject) obj; - final double key = bucket.getDouble("key"); - Assert.assertTrue(expectedCountsByKey.containsKey(key)); - Assert.assertThat(bucket.getJSONObject("COUNT(*)").getInt("value"), - equalTo(expectedCountsByKey.get(key))); - }); + someAliasBuckets.forEach( + obj -> { + JSONObject bucket = (JSONObject) obj; + final double key = bucket.getDouble("key"); + Assert.assertTrue(expectedCountsByKey.containsKey(key)); + Assert.assertThat( + bucket.getJSONObject("COUNT(*)").getInt("value"), + equalTo(expectedCountsByKey.get(key))); + }); } @Test public void reverseToRootGroupByOnNestedFieldWithFilterAndSumOnReverseNestedField() throws Exception { - String query = String.format("SELECT sum(reverse_nested(myNum)) bla FROM %s GROUP BY " + - "nested(message.info),filter('myFilter',message.info = 'a')", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT sum(reverse_nested(myNum)) bla FROM %s GROUP BY " + + "nested(message.info),filter('myFilter',message.info = 'a')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1172,9 +1337,11 @@ public void reverseToRootGroupByOnNestedFieldWithFilterAndSumOnReverseNestedFiel public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNestedNoPath() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY 
nested(message.info)," + - "filter('myFilter',message.info = 'a'),reverse_nested(comment.data,'~comment')", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + + "filter('myFilter',message.info = 'a'),reverse_nested(comment.data,'~comment')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1184,8 +1351,9 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes Assert.assertThat(msgInfoBuckets.length(), equalTo(1)); JSONArray commentDataBuckets = - (JSONArray) msgInfoBuckets.optQuery("/0/comment.data@NESTED_REVERSED" + - "/comment.data@NESTED/comment.data/buckets"); + (JSONArray) + msgInfoBuckets.optQuery( + "/0/comment.data@NESTED_REVERSED" + "/comment.data@NESTED/comment.data/buckets"); Assert.assertNotNull(commentDataBuckets); Assert.assertThat(commentDataBuckets.length(), equalTo(1)); Assert.assertThat(commentDataBuckets.query("/0/key"), equalTo("ab")); @@ -1196,9 +1364,12 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHistogram() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter" + - "('myFilter',message.info = 'a'),histogram('field'='comment.likes','reverse_nested'='~comment'," + - "'interval'='2' , 'alias' = 'someAlias' )", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter('myFilter',message.info" + + " = 'a'),histogram('field'='comment.likes','reverse_nested'='~comment','interval'='2'" + + " , 'alias' = 'someAlias' )", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1207,8 +1378,10 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(msgInfoBuckets.length(), equalTo(1)); - JSONArray someAliasBuckets = (JSONArray) msgInfoBuckets.optQuery( - "/0/~comment@NESTED_REVERSED/~comment@NESTED/someAlias/buckets"); + JSONArray someAliasBuckets = + (JSONArray) + msgInfoBuckets.optQuery( + "/0/~comment@NESTED_REVERSED/~comment@NESTED/someAlias/buckets"); Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(someAliasBuckets.length(), equalTo(2)); @@ -1216,13 +1389,15 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes expectedCountsByKey.put(0.0, 1); expectedCountsByKey.put(2.0, 1); - someAliasBuckets.forEach(obj -> { - JSONObject bucket = (JSONObject) obj; - final double key = bucket.getDouble("key"); - Assert.assertTrue(expectedCountsByKey.containsKey(key)); - Assert.assertThat(bucket.getJSONObject("COUNT(*)").getInt("value"), - equalTo(expectedCountsByKey.get(key))); - }); + someAliasBuckets.forEach( + obj -> { + JSONObject bucket = (JSONObject) obj; + final double key = bucket.getDouble("key"); + Assert.assertTrue(expectedCountsByKey.containsKey(key)); + Assert.assertThat( + bucket.getJSONObject("COUNT(*)").getInt("value"), + equalTo(expectedCountsByKey.get(key))); + }); } @Test @@ -1230,8 +1405,9 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterAndSumOnReverseNes throws Exception { String query = - String.format("SELECT sum(reverse_nested(comment.likes,'~comment')) bla FROM %s " + - "GROUP BY 
nested(message.info),filter('myFilter',message.info = 'a')", + String.format( + "SELECT sum(reverse_nested(comment.likes,'~comment')) bla FROM %s " + + "GROUP BY nested(message.info),filter('myFilter',message.info = 'a')", TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1241,10 +1417,11 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterAndSumOnReverseNes Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(msgInfoBuckets.length(), equalTo(1)); - Assert.assertNotNull(msgInfoBuckets.optQuery( - "/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla/value")); - JSONObject bla = (JSONObject) msgInfoBuckets - .query("/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla"); + Assert.assertNotNull( + msgInfoBuckets.optQuery("/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla/value")); + JSONObject bla = + (JSONObject) + msgInfoBuckets.query("/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla"); Assert.assertEquals(4.0, bla.getDouble("value"), 0.000001); } @@ -1257,8 +1434,9 @@ public void docsReturnedTestWithoutDocsHint() throws Exception { @Test public void docsReturnedTestWithDocsHint() throws Exception { - String query = String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", - TEST_INDEX_ACCOUNT); + String query = + String.format( + "SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); Assert.assertThat(getHits(result).length(), equalTo(10)); } @@ -1267,9 +1445,11 @@ public void docsReturnedTestWithDocsHint() throws Exception { @Test public void termsWithScript() throws Exception { String query = - String.format("select count(*), avg(all_client) from %s group by terms('alias'='asdf'," + - " substring(field, 0, 1)), date_histogram('alias'='time', 'field'='timestamp', " + - "'interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", TEST_INDEX_ONLINE); + String.format( + "select count(*), avg(all_client) from %s group by terms('alias'='asdf'," + + " substring(field, 0, 1)), date_histogram('alias'='time', 'field'='timestamp', " + + "'interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", + TEST_INDEX_ONLINE); String result = explainQuery(query); Assert.assertThat(result, containsString("\"script\":{\"source\"")); @@ -1278,9 +1458,10 @@ public void termsWithScript() throws Exception { @Test public void groupByScriptedDateHistogram() throws Exception { - String query = String - .format("select count(*), avg(all_client) from %s group by date_histogram('alias'='time'," + - " ceil(all_client), 'fixed_interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", + String query = + String.format( + "select count(*), avg(all_client) from %s group by date_histogram('alias'='time'," + + " ceil(all_client), 'fixed_interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", TEST_INDEX_ONLINE); String result = explainQuery(query); @@ -1290,9 +1471,10 @@ public void groupByScriptedDateHistogram() throws Exception { @Test public void groupByScriptedHistogram() throws Exception { - String query = String.format( - "select count(*) from %s group by histogram('alias'='all_field', pow(all_client,1))", - TEST_INDEX_ONLINE); + String query = + String.format( + "select count(*) from %s group by histogram('alias'='all_field', pow(all_client,1))", + TEST_INDEX_ONLINE); String result = explainQuery(query); Assert.assertThat(result, containsString("Math.pow(doc['all_client'].value, 1)")); @@ -1303,18 +1485,17 @@ public void 
groupByScriptedHistogram() throws Exception { public void distinctWithOneField() { Assert.assertEquals( executeQuery("SELECT DISTINCT name.lastname FROM " + TEST_INDEX_GAME_OF_THRONES, "jdbc"), - executeQuery("SELECT name.lastname FROM " + TEST_INDEX_GAME_OF_THRONES - + " GROUP BY name.lastname", "jdbc") - ); + executeQuery( + "SELECT name.lastname FROM " + TEST_INDEX_GAME_OF_THRONES + " GROUP BY name.lastname", + "jdbc")); } @Test public void distinctWithMultipleFields() { Assert.assertEquals( executeQuery("SELECT DISTINCT age, gender FROM " + TEST_INDEX_ACCOUNT, "jdbc"), - executeQuery("SELECT age, gender FROM " + TEST_INDEX_ACCOUNT - + " GROUP BY age, gender", "jdbc") - ); + executeQuery( + "SELECT age, gender FROM " + TEST_INDEX_ACCOUNT + " GROUP BY age, gender", "jdbc")); } private JSONObject getAggregation(final JSONObject queryResult, final String aggregationName) { @@ -1326,26 +1507,27 @@ private JSONObject getAggregation(final JSONObject queryResult, final String agg return aggregations.getJSONObject(aggregationName); } - private int getIntAggregationValue(final JSONObject queryResult, final String aggregationName, - final String fieldName) { + private int getIntAggregationValue( + final JSONObject queryResult, final String aggregationName, final String fieldName) { final JSONObject targetAggregation = getAggregation(queryResult, aggregationName); Assert.assertTrue(targetAggregation.has(fieldName)); return targetAggregation.getInt(fieldName); } - private double getDoubleAggregationValue(final JSONObject queryResult, - final String aggregationName, - final String fieldName) { + private double getDoubleAggregationValue( + final JSONObject queryResult, final String aggregationName, final String fieldName) { final JSONObject targetAggregation = getAggregation(queryResult, aggregationName); Assert.assertTrue(targetAggregation.has(fieldName)); return targetAggregation.getDouble(fieldName); } - private double getDoubleAggregationValue(final JSONObject queryResult, - final String aggregationName, - final String fieldName, final String subFieldName) { + private double getDoubleAggregationValue( + final JSONObject queryResult, + final String aggregationName, + final String fieldName, + final String subFieldName) { final JSONObject targetAggregation = getAggregation(queryResult, aggregationName); Assert.assertTrue(targetAggregation.has(fieldName)); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java index 9952b0c68a..9a416c9683 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.anyOf; @@ -40,9 +39,7 @@ import org.opensearch.client.Response; import org.opensearch.sql.legacy.executor.csv.CSVResult; -/** - * Tests to cover requests with "?format=csv" parameter - */ +/** Tests to cover requests with "?format=csv" parameter */ public class CsvFormatResponseIT extends SQLIntegTestCase { private boolean flatOption = false; @@ -75,16 +72,16 @@ public void allPercentilesByDefault() throws IOException { final String result = executeQueryWithStringOutput(query); final String expectedHeaders = - "PERCENTILES(age).1.0,PERCENTILES(age).5.0,PERCENTILES(age).25.0," + - 
"PERCENTILES(age).50.0,PERCENTILES(age).75.0,PERCENTILES(age).95.0,PERCENTILES(age).99.0"; + "PERCENTILES(age).1.0,PERCENTILES(age).5.0,PERCENTILES(age).25.0," + + "PERCENTILES(age).50.0,PERCENTILES(age).75.0,PERCENTILES(age).95.0,PERCENTILES(age).99.0"; Assert.assertThat(result, containsString(expectedHeaders)); } @Test public void specificPercentilesIntAndDouble() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT PERCENTILES(age,10,49.0) FROM %s", - TEST_INDEX_ACCOUNT); + final String query = + String.format(Locale.ROOT, "SELECT PERCENTILES(age,10,49.0) FROM %s", TEST_INDEX_ACCOUNT); final String result = executeQueryWithStringOutput(query); final String[] unexpectedPercentiles = {"1.0", "5.0", "25.0", "50.0", "75.0", "95.0", "99.0"}; @@ -92,14 +89,14 @@ public void specificPercentilesIntAndDouble() throws IOException { "\"PERCENTILES(age,10,49.0).10.0\",\"PERCENTILES(age,10,49.0).49.0\""; Assert.assertThat(result, containsString(expectedHeaders)); for (final String unexpectedPercentile : unexpectedPercentiles) { - Assert.assertThat(result, - not(containsString("PERCENTILES(age,10,49.0)." + unexpectedPercentile))); + Assert.assertThat( + result, not(containsString("PERCENTILES(age,10,49.0)." + unexpectedPercentile))); } } public void nestedObjectsAndArraysAreQuoted() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", - TEST_INDEX_NESTED_TYPE); + final String query = + String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", TEST_INDEX_NESTED_TYPE); final String result = executeQueryWithStringOutput(query); final String expectedMyNum = "\"[3, 4]\""; @@ -114,8 +111,8 @@ public void nestedObjectsAndArraysAreQuoted() throws IOException { public void arraysAreQuotedInFlatMode() throws IOException { setFlatOption(true); - final String query = String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", - TEST_INDEX_NESTED_TYPE); + final String query = + String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", TEST_INDEX_NESTED_TYPE); final String result = executeQueryWithStringOutput(query); final String expectedMyNum = "\"[3, 4]\""; @@ -166,15 +163,19 @@ public void fieldOrderOther() throws IOException { public void fieldOrderWithScriptFields() throws IOException { final String[] expectedFields = {"email", "script1", "script2", "gender", "address"}; - final String query = String.format(Locale.ROOT, "SELECT email, " + - "script(script1, \"doc['balance'].value * 2\"), " + - "script(script2, painless, \"doc['balance'].value + 10\"), gender, address " + - "FROM %s WHERE email='amberduke@pyrami.com'", TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT email, " + + "script(script1, \"doc['balance'].value * 2\"), " + + "script(script2, painless, \"doc['balance'].value + 10\"), gender, address " + + "FROM %s WHERE email='amberduke@pyrami.com'", + TEST_INDEX_ACCOUNT); verifyFieldOrder(expectedFields, query); } - //region Tests migrated from CSVResultsExtractorTests + // region Tests migrated from CSVResultsExtractorTests @Test public void simpleSearchResultNotNestedNotFlatNoAggs() throws Exception { @@ -195,8 +196,8 @@ public void simpleSearchResultNotNestedNotFlatNoAggs() throws Exception { @Test public void simpleSearchResultWithNestedNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + String.format(Locale.ROOT, "select name,house from %s", TEST_INDEX_GAME_OF_THRONES); 
CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -207,21 +208,42 @@ public void simpleSearchResultWithNestedNotFlatNoAggs() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(7, lines.size()); - Assert.assertThat(lines, hasRow(null, "Targaryen", - Arrays.asList("firstname=Daenerys", "lastname=Targaryen", "ofHerName=1"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Eddard", "lastname=Stark", "ofHisName=1"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Brandon", "lastname=Stark", "ofHisName=4"), true)); - Assert.assertThat(lines, hasRow(null, "Lannister", - Arrays.asList("firstname=Jaime", "lastname=Lannister", "ofHisName=1"), true)); + Assert.assertThat( + lines, + hasRow( + null, + "Targaryen", + Arrays.asList("firstname=Daenerys", "lastname=Targaryen", "ofHerName=1"), + true)); + Assert.assertThat( + lines, + hasRow( + null, + "Stark", + Arrays.asList("firstname=Eddard", "lastname=Stark", "ofHisName=1"), + true)); + Assert.assertThat( + lines, + hasRow( + null, + "Stark", + Arrays.asList("firstname=Brandon", "lastname=Stark", "ofHisName=4"), + true)); + Assert.assertThat( + lines, + hasRow( + null, + "Lannister", + Arrays.asList("firstname=Jaime", "lastname=Lannister", "ofHisName=1"), + true)); } @Ignore("headers incorrect in case of nested fields") @Test public void simpleSearchResultWithNestedOneFieldNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name.firstname,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, "select name.firstname,house from %s", TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -235,14 +257,16 @@ public void simpleSearchResultWithNestedOneFieldNotFlatNoAggs() throws Exception Assert.assertThat(lines, hasItem("{firstname=Eddard},Stark")); Assert.assertThat(lines, hasItem("{firstname=Brandon},Stark")); Assert.assertThat(lines, hasItem("{firstname=Jaime},Lannister")); - } @Ignore("headers incorrect in case of nested fields") @Test public void simpleSearchResultWithNestedTwoFieldsFromSameNestedNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name.firstname,name.lastname,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "select name.firstname,name.lastname,house from %s", + TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -253,20 +277,23 @@ public void simpleSearchResultWithNestedTwoFieldsFromSameNestedNotFlatNoAggs() t List lines = csvResult.getLines(); Assert.assertEquals(7, lines.size()); - Assert.assertThat(lines, hasRow(null, "Targaryen", - Arrays.asList("firstname=Daenerys", "lastname=Targaryen"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Eddard", "lastname=Stark"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Brandon", "lastname=Stark"), true)); - Assert.assertThat(lines, hasRow(null, "Lannister", - Arrays.asList("firstname=Jaime", "lastname=Lannister"), true)); + Assert.assertThat( + lines, + hasRow(null, "Targaryen", Arrays.asList("firstname=Daenerys", "lastname=Targaryen"), true)); + Assert.assertThat( + lines, hasRow(null, "Stark", Arrays.asList("firstname=Eddard", "lastname=Stark"), true)); + Assert.assertThat( + 
lines, hasRow(null, "Stark", Arrays.asList("firstname=Brandon", "lastname=Stark"), true)); + Assert.assertThat( + lines, + hasRow(null, "Lannister", Arrays.asList("firstname=Jaime", "lastname=Lannister"), true)); } @Test public void simpleSearchResultWithNestedWithFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name.firstname,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, "select name.firstname,house from %s", TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, true); List headers = csvResult.getHeaders(); @@ -284,9 +311,12 @@ public void simpleSearchResultWithNestedWithFlatNoAggs() throws Exception { @Test public void joinSearchResultNotNestedNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select c.gender , h.hname,h.words from %s c " + - "JOIN %s h " + - "on h.hname = c.house ", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "select c.gender , h.hname,h.words from %s c " + "JOIN %s h " + "on h.hname = c.house ", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -298,8 +328,8 @@ public void joinSearchResultNotNestedNotFlatNoAggs() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(4, lines.size()); - Assert.assertThat(lines, - hasRow(null, null, Arrays.asList("F", "fireAndBlood", "Targaryen"), false)); + Assert.assertThat( + lines, hasRow(null, null, Arrays.asList("F", "fireAndBlood", "Targaryen"), false)); } @Test @@ -311,7 +341,6 @@ public void simpleNumericValueAgg() throws Exception { Assert.assertEquals(1, headers.size()); Assert.assertEquals("count(*)", headers.get(0)); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); Assert.assertEquals("2", lines.get(0)); @@ -327,18 +356,16 @@ public void simpleNumericValueAggWithAlias() throws Exception { Assert.assertEquals(1, headers.size()); Assert.assertEquals("myAlias", headers.get(0)); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); Assert.assertEquals("3.0", lines.get(0)); - } @Ignore("only work for legacy engine") public void twoNumericAggWithAlias() throws Exception { String query = - String.format(Locale.ROOT, "select count(*) as count, avg(age) as myAlias from %s ", - TEST_INDEX_DOG); + String.format( + Locale.ROOT, "select count(*) as count, avg(age) as myAlias from %s ", TEST_INDEX_DOG); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -347,7 +374,6 @@ public void twoNumericAggWithAlias() throws Exception { Assert.assertTrue(headers.contains("count")); Assert.assertTrue(headers.contains("myAlias")); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); Assert.assertEquals("2,3.0", lines.get(0)); @@ -355,8 +381,8 @@ public void twoNumericAggWithAlias() throws Exception { @Test public void aggAfterTermsGroupBy() throws Exception { - String query = String.format(Locale.ROOT, "SELECT COUNT(*) FROM %s GROUP BY gender", - TEST_INDEX_ACCOUNT); + String query = + String.format(Locale.ROOT, "SELECT COUNT(*) FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(1, headers.size()); @@ -369,9 +395,11 @@ public void aggAfterTermsGroupBy() throws Exception { @Test public void 
aggAfterTwoTermsGroupBy() throws Exception { - String query = String.format(Locale.ROOT, - "SELECT COUNT(*) FROM %s where age in (35,36) GROUP BY gender,age", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT COUNT(*) FROM %s where age in (35,36) GROUP BY gender,age", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(1, headers.size()); @@ -379,18 +407,17 @@ public void aggAfterTwoTermsGroupBy() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(4, lines.size()); - assertThat(lines, containsInAnyOrder( - equalTo("31"), - equalTo("28"), - equalTo("21"), - equalTo("24"))); + assertThat( + lines, containsInAnyOrder(equalTo("31"), equalTo("28"), equalTo("21"), equalTo("24"))); } @Test public void multipleAggAfterTwoTermsGroupBy() throws Exception { - String query = String.format(Locale.ROOT, - "SELECT COUNT(*) , sum(balance) FROM %s where age in (35,36) GROUP BY gender,age", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT COUNT(*) , sum(balance) FROM %s where age in (35,36) GROUP BY gender,age", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(2, headers.size()); @@ -398,18 +425,23 @@ public void multipleAggAfterTwoTermsGroupBy() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(4, lines.size()); - assertThat(lines, containsInAnyOrder( - equalTo("31,647425"), - equalTo("28,678337"), - equalTo("21,505660"), - equalTo("24,472771"))); + assertThat( + lines, + containsInAnyOrder( + equalTo("31,647425"), + equalTo("28,678337"), + equalTo("21,505660"), + equalTo("24,472771"))); } @Test public void dateHistogramTest() throws Exception { - String query = String.format(Locale.ROOT, "select count(*) from %s" + - " group by date_histogram('field'='insert_time','fixed_interval'='4d','alias'='days')", - TEST_INDEX_ONLINE); + String query = + String.format( + Locale.ROOT, + "select count(*) from %s group by" + + " date_histogram('field'='insert_time','fixed_interval'='4d','alias'='days')", + TEST_INDEX_ONLINE); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(1, headers.size()); @@ -417,10 +449,7 @@ public void dateHistogramTest() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(3, lines.size()); - assertThat(lines, containsInAnyOrder( - equalTo("477.0"), - equalTo("5664.0"), - equalTo("3795.0"))); + assertThat(lines, containsInAnyOrder(equalTo("477.0"), equalTo("5664.0"), equalTo("3795.0"))); } @Test @@ -447,10 +476,16 @@ public void extendedStatsAggregationTest() throws Exception { CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); - final String[] expectedHeaders = {"EXTENDED_STATS(age).count", "EXTENDED_STATS(age).sum", - "EXTENDED_STATS(age).avg", "EXTENDED_STATS(age).min", "EXTENDED_STATS(age).max", - "EXTENDED_STATS(age).sumOfSquares", "EXTENDED_STATS(age).variance", - "EXTENDED_STATS(age).stdDeviation"}; + final String[] expectedHeaders = { + "EXTENDED_STATS(age).count", + "EXTENDED_STATS(age).sum", + "EXTENDED_STATS(age).avg", + "EXTENDED_STATS(age).min", + "EXTENDED_STATS(age).max", + "EXTENDED_STATS(age).sumOfSquares", + "EXTENDED_STATS(age).variance", + "EXTENDED_STATS(age).stdDeviation" + }; Assert.assertEquals(expectedHeaders.length, headers.size()); Assert.assertThat(headers, 
contains(expectedHeaders)); @@ -466,7 +501,9 @@ public void extendedStatsAggregationTest() throws Exception { @Test public void percentileAggregationTest() throws Exception { String query = - String.format(Locale.ROOT, "select percentiles(age) as per from %s where age > 31", + String.format( + Locale.ROOT, + "select percentiles(age) as per from %s where age > 31", TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -479,7 +516,6 @@ public void percentileAggregationTest() throws Exception { Assert.assertEquals("per.95.0", headers.get(5)); Assert.assertEquals("per.99.0", headers.get(6)); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); @@ -516,9 +552,11 @@ private void assertEquals(String expected, String actual, Double delta) { @Test public void includeScore() throws Exception { - String query = String.format(Locale.ROOT, - "select age, firstname, _score from %s where age > 31 order by _score desc limit 2 ", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "select age, firstname, _score from %s where age > 31 order by _score desc limit 2 ", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false, true, false); List headers = csvResult.getHeaders(); Assert.assertEquals(3, headers.size()); @@ -538,9 +576,11 @@ public void includeScore() throws Exception { @Test public void scriptedField() throws Exception { - String query = String.format(Locale.ROOT, - "select age+1 as agePlusOne ,age , firstname from %s where age = 31 limit 1", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "select age+1 as agePlusOne ,age , firstname from %s where age = 31 limit 1", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(3, headers.size()); @@ -548,8 +588,11 @@ public void scriptedField() throws Exception { Assert.assertTrue(headers.contains("age")); Assert.assertTrue(headers.contains("firstname")); List lines = csvResult.getLines(); - Assert.assertTrue(lines.get(0).contains("32,31") || lines.get(0).contains("32.0,31.0") || - lines.get(0).contains("31,32") || lines.get(0).contains("31.0,32.0")); + Assert.assertTrue( + lines.get(0).contains("32,31") + || lines.get(0).contains("32.0,31.0") + || lines.get(0).contains("31,32") + || lines.get(0).contains("31.0,32.0")); } @Ignore("separator not exposed") @@ -568,13 +611,15 @@ public void twoCharsSeperator() throws Exception { Assert.assertEquals(2, lines.size()); Assert.assertTrue("rex||2".equals(lines.get(0)) || "2||rex".equals(lines.get(0))); Assert.assertTrue("snoopy||4".equals(lines.get(1)) || "4||snoopy".equals(lines.get(1))); - } @Ignore("tested in @see: org.opensearch.sql.sql.IdentifierIT.testMetafieldIdentifierTest") public void includeIdAndNotTypeOrScore() throws Exception { - String query = String.format(Locale.ROOT, - "select age, firstname, _id from %s where lastname = 'Marquez' ", TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "select age, firstname, _id from %s where lastname = 'Marquez' ", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false, false, true); List headers = csvResult.getHeaders(); Assert.assertEquals(3, headers.size()); @@ -584,15 +629,16 @@ public void includeIdAndNotTypeOrScore() throws Exception { List lines = csvResult.getLines(); Assert.assertTrue(lines.get(0).contains(",437") || lines.get(0).contains("437,")); } - //endregion Tests migrated 
from CSVResultsExtractorTests + + // endregion Tests migrated from CSVResultsExtractorTests @Ignore("only work for legacy engine") public void sensitiveCharacterSanitizeTest() throws IOException { String requestBody = - "{" + - " \"=cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad'!_xlbgnm.A1\",\n" + - " \"-cmd|' /C notepad'!_xlbgnm.A1\": \"@cmd|' /C notepad'!_xlbgnm.A1\"\n" + - "}"; + "{" + + " \"=cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad'!_xlbgnm.A1\",\n" + + " \"-cmd|' /C notepad'!_xlbgnm.A1\": \"@cmd|' /C notepad'!_xlbgnm.A1\"\n" + + "}"; Request request = new Request("PUT", "/userdata/_doc/1?refresh=true"); request.setJsonEntity(requestBody); @@ -613,11 +659,11 @@ public void sensitiveCharacterSanitizeTest() throws IOException { @Ignore("only work for legacy engine") public void sensitiveCharacterSanitizeAndQuotedTest() throws IOException { String requestBody = - "{" + - " \"=cmd|' /C notepad'!_xlbgnm.A1,,\": \",+cmd|' /C notepad'!_xlbgnm.A1\",\n" + - " \",@cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad,,'!_xlbgnm.A1\",\n" + - " \"-cmd|' /C notepad,,'!_xlbgnm.A1\": \",,,@cmd|' /C notepad'!_xlbgnm.A1\"\n" + - "}"; + "{" + + " \"=cmd|' /C notepad'!_xlbgnm.A1,,\": \",+cmd|' /C notepad'!_xlbgnm.A1\",\n" + + " \",@cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad,,'!_xlbgnm.A1\",\n" + + " \"-cmd|' /C notepad,,'!_xlbgnm.A1\": \",,,@cmd|' /C notepad'!_xlbgnm.A1\"\n" + + "}"; Request request = new Request("PUT", "/userdata2/_doc/1?refresh=true"); request.setJsonEntity(requestBody); @@ -638,8 +684,11 @@ public void sensitiveCharacterSanitizeAndQuotedTest() throws IOException { @Test public void sanitizeTest() throws IOException { - CSVResult csvResult = executeCsvRequest( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), false); + CSVResult csvResult = + executeCsvRequest( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), + false); List lines = csvResult.getLines(); assertEquals(5, lines.size()); assertEquals(lines.get(0), "'+Amber JOHnny,Duke Willmington+"); @@ -660,8 +709,12 @@ public void selectFunctionAsFieldTest() throws IOException { private void verifyFieldOrder(final String[] expectedFields) throws IOException { final String fields = String.join(", ", expectedFields); - final String query = String.format(Locale.ROOT, "SELECT %s FROM %s " + - "WHERE email='amberduke@pyrami.com'", fields, TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT %s FROM %s " + "WHERE email='amberduke@pyrami.com'", + fields, + TEST_INDEX_ACCOUNT); verifyFieldOrder(expectedFields, query); } @@ -685,13 +738,18 @@ private CSVResult executeCsvRequest(final String query, boolean flat) throws IOE return executeCsvRequest(query, flat, false, false); } - private CSVResult executeCsvRequest(final String query, boolean flat, boolean includeScore, - boolean includeId) throws IOException { + private CSVResult executeCsvRequest( + final String query, boolean flat, boolean includeScore, boolean includeId) + throws IOException { final String requestBody = super.makeRequest(query); - final String endpoint = String.format(Locale.ROOT, - "/_plugins/_sql?format=csv&flat=%b&_id=%b&_score=%b", - flat, includeId, includeScore); + final String endpoint = + String.format( + Locale.ROOT, + "/_plugins/_sql?format=csv&flat=%b&_id=%b&_score=%b", + flat, + includeId, + includeScore); final Request sqlRequest = new Request("POST", endpoint); sqlRequest.setJsonEntity(requestBody); 
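    // Worked example of the endpoint built above: flat=true, includeId=false, includeScore=false
    // yields "/_plugins/_sql?format=csv&flat=true&_id=false&_score=false".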
RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); @@ -729,22 +787,32 @@ private CSVResult csvResultFromStringResponse(final String response) { return new CSVResult(headers, rows); } - private static AnyOf> hasRow(final String prefix, final String suffix, - final List items, - final boolean areItemsNested) { + private static AnyOf> hasRow( + final String prefix, + final String suffix, + final List items, + final boolean areItemsNested) { final Collection> permutations = TestUtils.getPermutations(items); - final List>> matchers = permutations.stream().map(permutation -> { - - final String delimiter = areItemsNested ? ", " : ","; - final String objectField = String.join(delimiter, permutation); - final String row = String.format(Locale.ROOT, "%s%s%s%s%s", - printablePrefix(prefix), areItemsNested ? "\"{" : "", - objectField, areItemsNested ? "}\"" : "", printableSuffix(suffix)); - return hasItem(row); - - }).collect(Collectors.toCollection(LinkedList::new)); + final List>> matchers = + permutations.stream() + .map( + permutation -> { + final String delimiter = areItemsNested ? ", " : ","; + final String objectField = String.join(delimiter, permutation); + final String row = + String.format( + Locale.ROOT, + "%s%s%s%s%s", + printablePrefix(prefix), + areItemsNested ? "\"{" : "", + objectField, + areItemsNested ? "}\"" : "", + printableSuffix(suffix)); + return hasItem(row); + }) + .collect(Collectors.toCollection(LinkedList::new)); return anyOf(matchers); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java index b246bb6224..abd2bbbcc2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -43,9 +42,8 @@ protected void init() throws Exception { } /** - * Acceptable fetch_size are positive numbers. - * For example 0, 24, 53.0, "110" (parsable string) , "786.23" - * Negative values should throw 400 + * Acceptable fetch_size are positive numbers. For example 0, 24, 53.0, "110" (parsable string) , + * "786.23". Negative values should throw 400. 
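   * A minimal sketch of the request body these tests exercise (the index name is a placeholder;
   * the body shape follows makeRequest further down): {"fetch_size": "50", "query": "SELECT firstname FROM accounts"}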
*/ @Test public void invalidNegativeFetchSize() throws IOException { @@ -65,9 +63,7 @@ public void invalidNegativeFetchSize() throws IOException { assertThat(resp.query("/error/type"), equalTo("IllegalArgumentException")); } - /** - * Non-numeric fetch_size value should throw 400 - */ + /** Non-numeric fetch_size value should throw 400 */ @Test public void invalidNonNumericFetchSize() throws IOException { String query = @@ -105,19 +101,22 @@ public void testExceptionOnCursorExplain() throws IOException { } /** - * For fetch_size = 0, default to non-pagination behaviour for simple queries - * This can be verified by checking that cursor is not present, and old default limit applies + * For fetch_size = 0, default to non-pagination behaviour for simple queries This can be verified + * by checking that cursor is not present, and old default limit applies */ @Test public void noPaginationWhenFetchSizeZero() throws IOException { String selectQuery = StringUtils.format("SELECT firstname, state FROM %s", TEST_INDEX_ACCOUNT); JSONObject response = new JSONObject(executeFetchQuery(selectQuery, 0, JDBC)); assertFalse(response.has(CURSOR)); - assertThat(response.getJSONArray(DATAROWS).length(), equalTo(1000)); // Default limit is 1000 in new engine + assertThat( + response.getJSONArray(DATAROWS).length(), + equalTo(1000)); // Default limit is 1000 in new engine } /** - * The index has 1000 records, with fetch size of 50 we should get 20 pages with no cursor on last page + * The index has 1000 records, with fetch size of 50 we should get 20 pages with no cursor on last + * page */ @Test public void validNumberOfPages() throws IOException { @@ -128,7 +127,7 @@ public void validNumberOfPages() throws IOException { int pageCount = 1; - while (!cursor.isEmpty()) { //this condition also checks that there is no cursor on last page + while (!cursor.isEmpty()) { // this condition also checks that there is no cursor on last page response = executeCursorQuery(cursor); cursor = response.optString(CURSOR); if (!cursor.isEmpty()) { @@ -162,7 +161,6 @@ public void validNumberOfPages() throws IOException { assertThat(pageCount, equalTo(36)); } - @Test public void validTotalResultWithAndWithoutPagination() throws IOException { // simple query - accounts index has 1000 docs, using higher limit to get all docs @@ -172,72 +170,78 @@ public void validTotalResultWithAndWithoutPagination() throws IOException { @Test public void validTotalResultWithAndWithoutPaginationWhereClause() throws IOException { - String selectQuery = StringUtils.format( - "SELECT firstname, state FROM %s WHERE balance < 25000 AND age > 32", TEST_INDEX_ACCOUNT - ); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s WHERE balance < 25000 AND age > 32", + TEST_INDEX_ACCOUNT); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 17, false); } @Test public void validTotalResultWithAndWithoutPaginationOrderBy() throws IOException { - String selectQuery = StringUtils.format( - "SELECT firstname, state FROM %s ORDER BY balance DESC ", TEST_INDEX_ACCOUNT - ); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s ORDER BY balance DESC ", TEST_INDEX_ACCOUNT); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 26, false); } @Test public void validTotalResultWithAndWithoutPaginationWhereAndOrderBy() throws IOException { - String selectQuery = StringUtils.format( - "SELECT firstname, state FROM %s WHERE balance < 25000 ORDER BY balance ASC ", - 
TEST_INDEX_ACCOUNT - ); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s WHERE balance < 25000 ORDER BY balance ASC ", + TEST_INDEX_ACCOUNT); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 80, false); } @Test public void validTotalResultWithAndWithoutPaginationNested() throws IOException { loadIndex(Index.NESTED_SIMPLE); - String selectQuery = StringUtils.format( - "SELECT name, a.city, a.state FROM %s m , m.address as a ", TEST_INDEX_NESTED_SIMPLE - ); + String selectQuery = + StringUtils.format( + "SELECT name, a.city, a.state FROM %s m , m.address as a ", TEST_INDEX_NESTED_SIMPLE); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 1, true); } @Test public void noCursorWhenResultsLessThanFetchSize() throws IOException { // fetch_size is 100, but actual number of rows returned from OpenSearch is 97 - // a scroll context will be opened but will be closed after first page as all records are fetched - String selectQuery = StringUtils.format( - "SELECT * FROM %s WHERE balance < 25000 AND age > 36 LIMIT 2000", TEST_INDEX_ACCOUNT - ); + // a scroll context will be opened but will be closed after first page as all records are + // fetched + String selectQuery = + StringUtils.format( + "SELECT * FROM %s WHERE balance < 25000 AND age > 36 LIMIT 2000", TEST_INDEX_ACCOUNT); JSONObject response = new JSONObject(executeFetchQuery(selectQuery, 100, JDBC)); assertFalse(response.has(CURSOR)); } @Ignore("Temporary deactivate the test until parameter substitution implemented in V2") - // Test was passing before, because such paging query was executed in V1, but now it is executed in V2 + // Test was passing before, because such paging query was executed in V1, but now it is executed + // in V2 @Test public void testCursorWithPreparedStatement() throws IOException { - JSONObject response = executeJDBCRequest(String.format("{" + - " \"fetch_size\": 200," + - " \"query\": \" SELECT age, state FROM %s WHERE age > ? OR state IN (?, ?)\"," + - " \"parameters\": [" + - " {" + - " \"type\": \"integer\"," + - " \"value\": 25" + - " }," + - " {" + - " \"type\": \"string\"," + - " \"value\": \"WA\"" + - " }," + - " {" + - " \"type\": \"string\"," + - " \"value\": \"UT\"" + - " }" + - " ]" + - "}", TestsConstants.TEST_INDEX_ACCOUNT)); - + JSONObject response = + executeJDBCRequest( + String.format( + "{" + + "\"fetch_size\": 200," + + "\"query\": \" SELECT age, state FROM %s WHERE age > ? 
OR state IN (?, ?)\"," + + "\"parameters\": [" + + " {" + + " \"type\": \"integer\"," + + " \"value\": 25" + + " }," + + " {" + + " \"type\": \"string\"," + + " \"value\": \"WA\"" + + " }," + + " {" + + " \"type\": \"string\"," + + " \"value\": \"UT\"" + + " }" + + "]" + + "}" + + TestsConstants.TEST_INDEX_ACCOUNT)); assertTrue(response.has(CURSOR)); verifyIsV1Cursor(response.getString(CURSOR)); } @@ -247,15 +251,16 @@ public void testRegressionOnDateFormatChange() throws IOException { loadIndex(Index.DATETIME); /** * With pagination, the field should be date formatted to MySQL format as in - * @see PR #367PR #367 * TEST_INDEX_DATE_TIME has three docs with login_time as date field with following values * 1.2015-01-01 * 2.2015-01-01T12:10:30Z * 3.1585882955 * 4.2020-04-08T11:10:30+05:00 + * */ - List actualDateList = new ArrayList<>(); String selectQuery = StringUtils.format("SELECT login_time FROM %s LIMIT 500", TEST_INDEX_DATE_TIME); @@ -271,16 +276,16 @@ public void testRegressionOnDateFormatChange() throws IOException { actualDateList.add(response.getJSONArray(DATAROWS).getJSONArray(0).getString(0)); } - List expectedDateList = Arrays.asList( - "2015-01-01 00:00:00.000", - "2015-01-01 12:10:30.000", - "1585882955", // by existing design, this is not formatted in MySQL standard format - "2020-04-08 06:10:30.000"); + List expectedDateList = + Arrays.asList( + "2015-01-01 00:00:00.000", + "2015-01-01 12:10:30.000", + "1585882955", // by existing design, this is not formatted in MySQL standard format + "2020-04-08 06:10:30.000"); assertThat(actualDateList, equalTo(expectedDateList)); } - @Ignore("Breaking change for OpenSearch: deprecate and enable cursor always") @Test public void defaultBehaviorWhenCursorSettingIsDisabled() throws IOException { @@ -296,7 +301,6 @@ public void defaultBehaviorWhenCursorSettingIsDisabled() throws IOException { wipeAllClusterSettings(); } - @Test public void testCursorSettings() throws IOException { // Assert default cursor settings @@ -307,13 +311,11 @@ public void testCursorSettings() throws IOException { new ClusterSetting(PERSISTENT, Settings.Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), "200s")); clusterSettings = getAllClusterSettings(); - assertThat(clusterSettings.query("/persistent/plugins.sql.cursor.keep_alive"), - equalTo("200s")); + assertThat(clusterSettings.query("/persistent/plugins.sql.cursor.keep_alive"), equalTo("200s")); wipeAllClusterSettings(); } - @Ignore("Breaking change for OpenSearch: no pagination if fetch_size field absent in request") @Test public void testDefaultFetchSizeFromClusterSettings() throws IOException { @@ -339,8 +341,9 @@ public void testDefaultFetchSizeFromClusterSettings() throws IOException { public void testCursorCloseAPI() throws IOException { // multiple invocation of closing cursor should return success // fetch page using old cursor should throw error - String selectQuery = StringUtils.format( - "SELECT firstname, state FROM %s WHERE balance > 100 and age < 40", TEST_INDEX_ACCOUNT); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s WHERE balance > 100 and age < 40", TEST_INDEX_ACCOUNT); JSONObject result = new JSONObject(executeFetchQuery(selectQuery, 50, JDBC)); String cursor = result.getString(CURSOR); verifyIsV2Cursor(result); @@ -350,11 +353,11 @@ public void testCursorCloseAPI() throws IOException { cursor = result.optString(CURSOR); verifyIsV2Cursor(result); } - //Closing the cursor + // Closing the cursor JSONObject closeResp = executeCursorCloseQuery(cursor); 
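    // Expected response shape for a successful close (as asserted below): {"succeeded": true}.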
assertThat(closeResp.getBoolean("succeeded"), equalTo(true)); - //Closing the cursor multiple times is idempotent + // Closing the cursor multiple times is idempotent for (int i = 0; i < 5; i++) { closeResp = executeCursorCloseQuery(cursor); assertThat(closeResp.getBoolean("succeeded"), equalTo(true)); @@ -371,8 +374,7 @@ public void testCursorCloseAPI() throws IOException { JSONObject resp = new JSONObject(TestUtils.getResponseBody(response)); assertThat(resp.getInt("status"), equalTo(404)); assertThat(resp.query("/error/reason").toString(), containsString("all shards failed")); - assertThat(resp.query("/error/details").toString(), - containsString("No search context found")); + assertThat(resp.query("/error/details").toString(), containsString("No search context found")); assertThat(resp.query("/error/type"), equalTo("SearchPhaseExecutionException")); } @@ -395,9 +397,9 @@ public void invalidCursorIdNotDecodable() throws IOException { } /** - * The index has 1000 records, with fetch size of 50 and LIMIT in place - * we should get Math.ceil(limit/fetchSize) pages and LIMIT number of rows. - * Basically it should not retrieve all records in presence of a smaller LIMIT value. + * The index has 1000 records, with fetch size of 50 and LIMIT in place we should get + * Math.ceil(limit/fetchSize) pages and LIMIT number of rows. Basically it should not retrieve all + * records in presence of a smaller LIMIT value. */ @Test public void respectLimitPassedInSelectClause() throws IOException { @@ -422,7 +424,6 @@ public void respectLimitPassedInSelectClause() throws IOException { assertThat(actualDataRowCount, equalTo(limit)); } - @Test public void noPaginationWithNonJDBCFormat() throws IOException { // checking for CSV, RAW format @@ -439,10 +440,9 @@ public void noPaginationWithNonJDBCFormat() throws IOException { assertThat(rows.length, equalTo(1000)); } - - public void verifyWithAndWithoutPaginationResponse(String sqlQuery, String cursorQuery, - int fetch_size, boolean shouldFallBackToV1) - throws IOException { + public void verifyWithAndWithoutPaginationResponse( + String sqlQuery, String cursorQuery, int fetch_size, boolean shouldFallBackToV1) + throws IOException { // we are only checking here for schema and datarows JSONObject withoutCursorResponse = new JSONObject(executeFetchQuery(sqlQuery, 0, JDBC)); @@ -473,10 +473,10 @@ public void verifyWithAndWithoutPaginationResponse(String sqlQuery, String curso } } - verifySchema(withoutCursorResponse.optJSONArray(SCHEMA), - withCursorResponse.optJSONArray(SCHEMA)); - verifyDataRows(withoutCursorResponse.optJSONArray(DATAROWS), - withCursorResponse.optJSONArray(DATAROWS)); + verifySchema( + withoutCursorResponse.optJSONArray(SCHEMA), withCursorResponse.optJSONArray(SCHEMA)); + verifyDataRows( + withoutCursorResponse.optJSONArray(DATAROWS), withCursorResponse.optJSONArray(DATAROWS)); } public void verifySchema(JSONArray schemaOne, JSONArray schemaTwo) { @@ -504,14 +504,14 @@ private void verifyIsV1Cursor(String cursor) { if (cursor.isEmpty()) { return; } - assertTrue("The cursor '" + cursor.substring(0, 50) + "...' is not from v1 engine.", cursor.startsWith("d:")); + assertTrue( + "The cursor '" + cursor.substring(0, 50) + "...' 
is not from v1 engine.", + cursor.startsWith("d:")); } private String makeRequest(String query, String fetch_size) { - return String.format("{" + - " \"fetch_size\": \"%s\"," + - " \"query\": \"%s\"" + - "}", fetch_size, query); + return String.format( + "{" + " \"fetch_size\": \"%s\"," + " \"query\": \"%s\"" + "}", fetch_size, query); } private JSONObject executeJDBCRequest(String requestBody) throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java index a0b4b19898..388d900924 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.contains; @@ -33,8 +32,7 @@ public class DateFormatIT extends SQLIntegTestCase { private static final String SELECT_FROM = - "SELECT insert_time " + - "FROM " + TestsConstants.TEST_INDEX_ONLINE + " "; + "SELECT insert_time " + "FROM " + TestsConstants.TEST_INDEX_ONLINE + " "; @Override protected void init() throws Exception { @@ -42,21 +40,20 @@ protected void init() throws Exception { } /** - * All of the following tests use UTC as their date_format timezone as this is the same timezone of the data - * being queried. This is to prevent discrepancies in the OpenSearch query and the actual field data that is - * being checked for the integration tests. - *
<p>
- * Large LIMIT values were given for some of these queries since the default result size of the query is 200 and - * this ends up excluding some of the expected values causing the assertion to fail. LIMIT overrides this. + * All the following tests use UTC as their date_format timezone as this is the same timezone of + * the data being queried. This is to prevent discrepancies in the OpenSearch query and the actual + * field data that is being checked for the integration tests. + * + *
<p>
Large LIMIT values were given for some of these queries since the default result size of the + * query is 200 and this ends up excluding some of the expected values causing the assertion to + * fail. LIMIT overrides this. */ - @Test public void equalTo() throws SqlParseException { assertThat( dateQuery( SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') = '2014-08-17'"), - contains("2014-08-17") - ); + contains("2014-08-17")); } @Test @@ -64,19 +61,18 @@ public void lessThan() throws SqlParseException { assertThat( dateQuery( SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') < '2014-08-18'"), - contains("2014-08-17") - ); + contains("2014-08-17")); } @Test public void lessThanOrEqualTo() throws SqlParseException { assertThat( dateQuery( - SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-18' " + - "ORDER BY insert_time " + - "LIMIT 1000"), - contains("2014-08-17", "2014-08-18") - ); + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-18' " + + "ORDER BY insert_time " + + "LIMIT 1000"), + contains("2014-08-17", "2014-08-18")); } @Test @@ -84,92 +80,101 @@ public void greaterThan() throws SqlParseException { assertThat( dateQuery( SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-08-23'"), - contains("2014-08-24") - ); + contains("2014-08-24")); } @Test public void greaterThanOrEqualTo() throws SqlParseException { assertThat( dateQuery( - SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-23' " + - "ORDER BY insert_time " + - "LIMIT 2000"), - contains("2014-08-23", "2014-08-24") - ); + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-23' " + + "ORDER BY insert_time " + + "LIMIT 2000"), + contains("2014-08-23", "2014-08-24")); } @Test public void and() throws SqlParseException { assertThat( - dateQuery(SELECT_FROM + - "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-21' " + - "AND date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-23' " + - "ORDER BY insert_time " + - "LIMIT 3000"), - contains("2014-08-21", "2014-08-22", "2014-08-23") - ); + dateQuery( + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-21' " + + "AND date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-23' " + + "ORDER BY insert_time " + + "LIMIT 3000"), + contains("2014-08-21", "2014-08-22", "2014-08-23")); } @Test public void andWithDefaultTimeZone() throws SqlParseException { assertThat( - dateQuery(SELECT_FROM + - "WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + - "AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'", + dateQuery( + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + + "AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'", "yyyy-MM-dd HH:mm:ss"), - contains("2014-08-17 16:13:12") - ); + contains("2014-08-17 16:13:12")); } @Test public void or() throws SqlParseException { assertThat( - dateQuery(SELECT_FROM + - "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') < '2014-08-18' " + - "OR date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-08-23' " + - "ORDER BY insert_time " + - "LIMIT 1000"), - contains("2014-08-17", "2014-08-24") - ); + dateQuery( + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') < '2014-08-18' " + + "OR date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-08-23' " + + "ORDER BY insert_time " + + 
"LIMIT 1000"), + contains("2014-08-17", "2014-08-24")); } - @Test public void sortByDateFormat() throws IOException { - // Sort by expression in descending order, but sort inside in ascending order, so we increase our confidence + // Sort by expression in descending order, but sort inside in ascending order, so we increase + // our confidence // that successful test isn't just random chance. JSONArray hits = - getHits(executeQuery("SELECT all_client, insert_time " + - " FROM " + TestsConstants.TEST_INDEX_ONLINE + - " ORDER BY date_format(insert_time, 'dd-MM-YYYY', 'UTC') DESC, insert_time " + - " LIMIT 10")); + getHits( + executeQuery( + "SELECT all_client, insert_time " + + " FROM " + + TestsConstants.TEST_INDEX_ONLINE + + " ORDER BY date_format(insert_time, 'dd-MM-YYYY', 'UTC') DESC, insert_time " + + " LIMIT 10")); - assertThat(new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), + assertThat( + new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), is(new DateTime("2014-08-24T00:00:41.221Z", DateTimeZone.UTC))); } @Test public void sortByAliasedDateFormat() throws IOException { JSONArray hits = - getHits(executeQuery( - "SELECT all_client, insert_time, date_format(insert_time, 'dd-MM-YYYY', 'UTC') date" + - " FROM " + TestsConstants.TEST_INDEX_ONLINE + - " ORDER BY date DESC, insert_time " + - " LIMIT 10")); + getHits( + executeQuery( + "SELECT all_client, insert_time, date_format(insert_time, 'dd-MM-YYYY', 'UTC')" + + " date FROM " + + TestsConstants.TEST_INDEX_ONLINE + + " ORDER BY date DESC, insert_time " + + " LIMIT 10")); - assertThat(new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), + assertThat( + new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), is(new DateTime("2014-08-24T00:00:41.221Z", DateTimeZone.UTC))); } @Ignore("skip this test due to inconsistency in type in new engine") @Test public void selectDateTimeWithDefaultTimeZone() throws SqlParseException { - JSONObject response = executeJdbcRequest("SELECT date_format(insert_time, 'yyyy-MM-dd') as date " + - " FROM " + TestsConstants.TEST_INDEX_ONLINE + - " WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + - " AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'"); + JSONObject response = + executeJdbcRequest( + "SELECT date_format(insert_time, 'yyyy-MM-dd') as date " + + " FROM " + + TestsConstants.TEST_INDEX_ONLINE + + " WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + + " AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'"); verifySchema(response, schema("date", "", "text")); verifyDataRows(response, rows("2014-08-17")); @@ -177,52 +182,57 @@ public void selectDateTimeWithDefaultTimeZone() throws SqlParseException { @Test public void groupByAndSort() throws IOException { - JSONObject aggregations = executeQuery( - "SELECT date_format(insert_time, 'dd-MM-YYYY') " + - "FROM opensearch-sql_test_index_online " + - "GROUP BY date_format(insert_time, 'dd-MM-YYYY') " + - "ORDER BY date_format(insert_time, 'dd-MM-YYYY') DESC") - .getJSONObject("aggregations"); + JSONObject aggregations = + executeQuery( + "SELECT date_format(insert_time, 'dd-MM-YYYY') " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date_format(insert_time, 'dd-MM-YYYY') " + + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') DESC") + .getJSONObject("aggregations"); checkAggregations(aggregations, 
"date_format", Ordering.natural().reverse()); } @Test public void groupByAndSortAliasedReversed() throws IOException { - JSONObject aggregations = executeQuery( - "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + - "FROM opensearch-sql_test_index_online " + - "GROUP BY date " + - "ORDER BY date DESC") - .getJSONObject("aggregations"); + JSONObject aggregations = + executeQuery( + "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date " + + "ORDER BY date DESC") + .getJSONObject("aggregations"); checkAggregations(aggregations, "date", Ordering.natural().reverse()); } @Test public void groupByAndSortAliased() throws IOException { - JSONObject aggregations = executeQuery( - "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + - "FROM opensearch-sql_test_index_online " + - "GROUP BY date " + - "ORDER BY date ") - .getJSONObject("aggregations"); + JSONObject aggregations = + executeQuery( + "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date " + + "ORDER BY date ") + .getJSONObject("aggregations"); checkAggregations(aggregations, "date", Ordering.natural()); } - private void checkAggregations(JSONObject aggregations, String key, - Ordering ordering) { + private void checkAggregations( + JSONObject aggregations, String key, Ordering ordering) { String date = getScriptAggregationKey(aggregations, key); JSONArray buckets = aggregations.getJSONObject(date).getJSONArray("buckets"); assertThat(buckets.length(), is(8)); - List aggregationSortKeys = IntStream.range(0, 8) - .mapToObj(index -> buckets.getJSONObject(index).getString("key")) - .collect(Collectors.toList()); + List aggregationSortKeys = + IntStream.range(0, 8) + .mapToObj(index -> buckets.getJSONObject(index).getString("key")) + .collect(Collectors.toList()); - assertTrue("The query result must be sorted by date in descending order", + assertTrue( + "The query result must be sorted by date in descending order", ordering.isOrdered(aggregationSortKeys)); } @@ -239,7 +249,8 @@ private Set dateQuery(String sql, String format) throws SqlParseExceptio } } - private Set getResult(JSONObject response, String fieldName, DateTimeFormatter formatter) { + private Set getResult( + JSONObject response, String fieldName, DateTimeFormatter formatter) { JSONArray hits = getHits(response); Set result = new TreeSet<>(); // Using TreeSet so order is maintained for (int i = 0; i < hits.length(); i++) { @@ -255,11 +266,11 @@ private Set getResult(JSONObject response, String fieldName, DateTimeFor } public static String getScriptAggregationKey(JSONObject aggregation, String prefix) { - return aggregation.keySet() - .stream() + return aggregation.keySet().stream() .filter(x -> x.startsWith(prefix)) .findFirst() - .orElseThrow(() -> new RuntimeException( - "Can't find key" + prefix + " in aggregation " + aggregation)); + .orElseThrow( + () -> + new RuntimeException("Can't find key" + prefix + " in aggregation " + aggregation)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java index 369984d0a3..d9a6849fc8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -31,13 +30,13 
@@ public class DateFunctionsIT extends SQLIntegTestCase { private static final String FROM = "FROM " + TestsConstants.TEST_INDEX_ONLINE; /** - * Some of the first few SQL functions are tested in both SELECT and WHERE cases for flexibility and the remainder - * are merely tested in SELECT for simplicity. - *
<p>
- * There is a limitation in all date SQL functions in that they expect a date field as input. In the future this - * can be expanded on by supporting CAST and casting dates given as Strings to TIMESTAMP (SQL's date type). + * Some of the first few SQL functions are tested in both SELECT and WHERE cases for flexibility + * and the remainder are merely tested in SELECT for simplicity. + * + *
<p>
There is a limitation in all date SQL functions in that they expect a date field as input. + * In the future this can be expanded on by supporting CAST and casting dates given as Strings to + * TIMESTAMP (SQL's date type). */ - @Override protected void init() throws Exception { loadIndex(Index.ONLINE); @@ -45,9 +44,7 @@ protected void init() throws Exception { @Test public void year() throws IOException { - SearchHit[] hits = query( - "SELECT YEAR(insert_time) as year" - ); + SearchHit[] hits = query("SELECT YEAR(insert_time) as year"); for (SearchHit hit : hits) { int year = (int) getField(hit, "year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -57,9 +54,7 @@ public void year() throws IOException { @Test public void monthOfYear() throws IOException { - SearchHit[] hits = query( - "SELECT MONTH_OF_YEAR(insert_time) as month_of_year" - ); + SearchHit[] hits = query("SELECT MONTH_OF_YEAR(insert_time) as month_of_year"); for (SearchHit hit : hits) { int monthOfYear = (int) getField(hit, "month_of_year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -69,9 +64,7 @@ public void monthOfYear() throws IOException { @Test public void weekOfYearInSelect() throws IOException { - SearchHit[] hits = query( - "SELECT WEEK_OF_YEAR(insert_time) as week_of_year" - ); + SearchHit[] hits = query("SELECT WEEK_OF_YEAR(insert_time) as week_of_year"); for (SearchHit hit : hits) { int weekOfYear = (int) getField(hit, "week_of_year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -81,12 +74,12 @@ public void weekOfYearInSelect() throws IOException { @Test public void weekOfYearInWhere() throws IOException { - SearchHit[] hits = query( - "SELECT insert_time", - "WHERE DATE_FORMAT(insert_time, 'YYYY-MM-dd') < '2014-08-19' AND " + - "WEEK_OF_YEAR(insert_time) > 33", - "LIMIT 2000" - ); + SearchHit[] hits = + query( + "SELECT insert_time", + "WHERE DATE_FORMAT(insert_time, 'YYYY-MM-dd') < '2014-08-19' AND " + + "WEEK_OF_YEAR(insert_time) > 33", + "LIMIT 2000"); for (SearchHit hit : hits) { DateTime insertTime = getDateFromSource(hit, "insert_time"); assertThat(insertTime.weekOfWeekyear().get(), greaterThan(33)); @@ -95,9 +88,7 @@ public void weekOfYearInWhere() throws IOException { @Test public void dayOfYearInSelect() throws IOException { - SearchHit[] hits = query( - "SELECT DAY_OF_YEAR(insert_time) as day_of_year", "LIMIT 2000" - ); + SearchHit[] hits = query("SELECT DAY_OF_YEAR(insert_time) as day_of_year", "LIMIT 2000"); for (SearchHit hit : hits) { int dayOfYear = (int) getField(hit, "day_of_year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -107,9 +98,8 @@ public void dayOfYearInSelect() throws IOException { @Test public void dayOfYearInWhere() throws IOException { - SearchHit[] hits = query( - "SELECT insert_time", "WHERE DAY_OF_YEAR(insert_time) < 233", "LIMIT 10000" - ); + SearchHit[] hits = + query("SELECT insert_time", "WHERE DAY_OF_YEAR(insert_time) < 233", "LIMIT 10000"); for (SearchHit hit : hits) { DateTime insertTime = getDateFromSource(hit, "insert_time"); assertThat(insertTime.dayOfYear().get(), lessThan(233)); @@ -118,9 +108,7 @@ public void dayOfYearInWhere() throws IOException { @Test public void dayOfMonthInSelect() throws IOException { - SearchHit[] hits = query( - "SELECT DAY_OF_MONTH(insert_time) as day_of_month", "LIMIT 2000" - ); + SearchHit[] hits = query("SELECT DAY_OF_MONTH(insert_time) as day_of_month", "LIMIT 2000"); for (SearchHit hit : hits) { int dayOfMonth = (int) getField(hit, "day_of_month"); DateTime 
insertTime = getDateFromSource(hit, "insert_time"); @@ -130,9 +118,8 @@ public void dayOfMonthInSelect() throws IOException { @Test public void dayOfMonthInWhere() throws IOException { - SearchHit[] hits = query( - "SELECT insert_time", "WHERE DAY_OF_MONTH(insert_time) < 21", "LIMIT 10000" - ); + SearchHit[] hits = + query("SELECT insert_time", "WHERE DAY_OF_MONTH(insert_time) < 21", "LIMIT 10000"); for (SearchHit hit : hits) { DateTime insertTime = getDateFromSource(hit, "insert_time"); assertThat(insertTime.dayOfMonth().get(), lessThan(21)); @@ -141,9 +128,7 @@ public void dayOfMonthInWhere() throws IOException { @Test public void dayOfWeek() throws IOException { - SearchHit[] hits = query( - "SELECT DAY_OF_WEEK(insert_time) as day_of_week", "LIMIT 2000" - ); + SearchHit[] hits = query("SELECT DAY_OF_WEEK(insert_time) as day_of_week", "LIMIT 2000"); for (SearchHit hit : hits) { int dayOfWeek = (int) getField(hit, "day_of_week"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -153,9 +138,7 @@ public void dayOfWeek() throws IOException { @Test public void hourOfDay() throws IOException { - SearchHit[] hits = query( - "SELECT HOUR_OF_DAY(insert_time) as hour_of_day", "LIMIT 1000" - ); + SearchHit[] hits = query("SELECT HOUR_OF_DAY(insert_time) as hour_of_day", "LIMIT 1000"); for (SearchHit hit : hits) { int hourOfDay = (int) getField(hit, "hour_of_day"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -165,9 +148,7 @@ public void hourOfDay() throws IOException { @Test public void minuteOfDay() throws IOException { - SearchHit[] hits = query( - "SELECT MINUTE_OF_DAY(insert_time) as minute_of_day", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MINUTE_OF_DAY(insert_time) as minute_of_day", "LIMIT 500"); for (SearchHit hit : hits) { int minuteOfDay = (int) getField(hit, "minute_of_day"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -177,9 +158,7 @@ public void minuteOfDay() throws IOException { @Test public void minuteOfHour() throws IOException { - SearchHit[] hits = query( - "SELECT MINUTE_OF_HOUR(insert_time) as minute_of_hour", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MINUTE_OF_HOUR(insert_time) as minute_of_hour", "LIMIT 500"); for (SearchHit hit : hits) { int minuteOfHour = (int) getField(hit, "minute_of_hour"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -189,9 +168,8 @@ public void minuteOfHour() throws IOException { @Test public void secondOfMinute() throws IOException { - SearchHit[] hits = query( - "SELECT SECOND_OF_MINUTE(insert_time) as second_of_minute", "LIMIT 500" - ); + SearchHit[] hits = + query("SELECT SECOND_OF_MINUTE(insert_time) as second_of_minute", "LIMIT 500"); for (SearchHit hit : hits) { int secondOfMinute = (int) getField(hit, "second_of_minute"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -201,9 +179,7 @@ public void secondOfMinute() throws IOException { @Test public void month() throws IOException { - SearchHit[] hits = query( - "SELECT MONTH(insert_time) AS month", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MONTH(insert_time) AS month", "LIMIT 500"); for (SearchHit hit : hits) { int month = (int) getField(hit, "month"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -213,9 +189,7 @@ public void month() throws IOException { @Test public void dayofmonth() throws IOException { - SearchHit[] hits = query( - "SELECT DAYOFMONTH(insert_time) AS dayofmonth", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT DAYOFMONTH(insert_time) AS 
dayofmonth", "LIMIT 500"); for (SearchHit hit : hits) { int dayofmonth = (int) getField(hit, "dayofmonth"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -225,9 +199,7 @@ public void dayofmonth() throws IOException { @Test public void date() throws IOException { - SearchHit[] hits = query( - "SELECT DATE(insert_time) AS date", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT DATE(insert_time) AS date", "LIMIT 500"); for (SearchHit hit : hits) { String date = (String) getField(hit, "date"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -237,9 +209,7 @@ public void date() throws IOException { @Test public void monthname() throws IOException { - SearchHit[] hits = query( - "SELECT MONTHNAME(insert_time) AS monthname", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MONTHNAME(insert_time) AS monthname", "LIMIT 500"); for (SearchHit hit : hits) { String monthname = (String) getField(hit, "monthname"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -249,9 +219,7 @@ public void monthname() throws IOException { @Test public void timestamp() throws IOException { - SearchHit[] hits = query( - "SELECT TIMESTAMP(insert_time) AS timestamp", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT TIMESTAMP(insert_time) AS timestamp", "LIMIT 500"); for (SearchHit hit : hits) { String timastamp = (String) getField(hit, "timestamp"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -284,14 +252,16 @@ private SearchHit[] query(String select, String... statements) throws IOExceptio return execute(select + " " + FROM + " " + String.join(" ", statements)); } - // TODO: I think this code is now re-used in multiple classes, would be good to move to the base class. + // TODO: I think this code is now re-used in multiple classes, would be good to move to the base + // class. 
private SearchHit[] execute(String sqlRequest) throws IOException { final JSONObject jsonObject = executeRequest(makeRequest(sqlRequest)); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser).getHits().getHits(); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java index 4fad5a23b7..24895b5b69 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.core.IsEqual.equalTo; @@ -20,8 +19,8 @@ public class DeleteIT extends SQLIntegTestCase { protected void init() throws Exception { loadIndex(Index.ACCOUNT); loadIndex(Index.PHRASE); - updateClusterSettings(new ClusterSetting(PERSISTENT, - Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "true")); + updateClusterSettings( + new ClusterSetting(PERSISTENT, Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "true")); } @Test @@ -34,7 +33,8 @@ public void deleteAllTest() throws IOException, InterruptedException { response = executeRequest(makeRequest(deleteQuery)); assertThat(response.getInt("deleted"), equalTo(totalHits)); - // The documents are not deleted immediately, causing the next search call to return all results. + // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); @@ -44,20 +44,21 @@ public void deleteAllTest() throws IOException, InterruptedException { @Test public void deleteWithConditionTest() throws IOException, InterruptedException { - String selectQuery = StringUtils.format( - "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String selectQuery = + StringUtils.format( + "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); JSONObject response = executeRequest(makeRequest(selectQuery)); int totalHits = getTotalHits(response); - String deleteQuery = StringUtils.format( - "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String deleteQuery = + StringUtils.format( + "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); response = executeRequest(makeRequest(deleteQuery)); assertThat(response.getInt("deleted"), equalTo(totalHits)); - // The documents are not deleted immediately, causing the next search call to return all results. + // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); @@ -84,7 +85,8 @@ public void deleteAllWithJdbcFormat() throws IOException, InterruptedException { assertThat(response.query("/status"), equalTo(200)); assertThat(response.query("/size"), equalTo(1)); - // The documents are not deleted immediately, causing the next search call to return all results. 
+ // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); @@ -98,18 +100,18 @@ public void deleteAllWithJdbcFormat() throws IOException, InterruptedException { @Test public void deleteWithConditionTestJdbcFormat() throws IOException, InterruptedException { - String selectQuery = StringUtils.format( - "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String selectQuery = + StringUtils.format( + "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); JSONObject response = executeRequest(makeRequest(selectQuery)); int totalHits = getTotalHits(response); - String deleteQuery = StringUtils.format( - "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String deleteQuery = + StringUtils.format( + "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); response = new JSONObject(executeQuery(deleteQuery, "jdbc")); System.out.println(response); @@ -120,7 +122,8 @@ public void deleteWithConditionTestJdbcFormat() throws IOException, InterruptedE assertThat(response.query("/status"), equalTo(200)); assertThat(response.query("/size"), equalTo(1)); - // The documents are not deleted immediately, causing the next search call to return all results. + // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java index 4ecabdbf01..b42e9f84f4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -40,16 +39,20 @@ protected void init() throws Exception { @Test public void searchSanity() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/search_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/search_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); String result = - explainQuery(String.format("SELECT * FROM %s WHERE firstname LIKE 'A%%' AND age > 20 " + - "GROUP BY gender order by _score", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "SELECT * FROM %s WHERE firstname LIKE 'A%%' AND age > 20 " + + "GROUP BY gender order by _score", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } // This test was ignored because group by case function is not supported @@ -57,118 +60,153 @@ public void searchSanity() throws IOException { @Test public void aggregationQuery() throws IOException { - String expectedOutputFilePath = 
TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/aggregation_query_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - String result = explainQuery( - String.format("SELECT address, CASE WHEN gender='0' then 'aaa' else 'bbb'end a2345," + - "count(age) FROM %s GROUP BY terms('field'='address','execution_hint'='global_ordinals'),a2345", - TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/aggregation_query_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + String result = + explainQuery( + String.format( + "SELECT address, CASE WHEN gender='0' then 'aaa' else 'bbb'end a2345,count(age)" + + " FROM %s GROUP BY" + + " terms('field'='address','execution_hint'='global_ordinals'),a2345", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void explainScriptValue() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/script_value.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/script_value.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); - String result = explainQuery(String.format("SELECT case when gender is null then 'aaa' " + - "else gender end test , account_number FROM %s", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String result = + explainQuery( + String.format( + "SELECT case when gender is null then 'aaa' " + + "else gender end test , account_number FROM %s", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void betweenScriptValue() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/between_query.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/between_query.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); String result = - explainQuery(String.format("SELECT case when balance between 100 and 200 then 'aaa' " + - "else balance end test, account_number FROM %s", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "SELECT case when balance between 100 and 200 then 'aaa' " + + "else balance end test, account_number FROM %s", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void searchSanityFilter() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - 
"src/test/resources/expectedOutput/search_explain_filter.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/search_explain_filter.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); - String result = explainQuery(String.format("SELECT * FROM %s WHERE firstname LIKE 'A%%' " + - "AND age > 20 GROUP BY gender", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String result = + explainQuery( + String.format( + "SELECT * FROM %s WHERE firstname LIKE 'A%%' " + "AND age > 20 GROUP BY gender", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void deleteSanity() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/delete_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/delete_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); ; String result = - explainQuery(String.format("DELETE FROM %s WHERE firstname LIKE 'A%%' AND age > 20", - TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "DELETE FROM %s WHERE firstname LIKE 'A%%' AND age > 20", TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void spatialFilterExplainTest() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/search_spatial_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/search_spatial_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); ; - String result = explainQuery(String.format("SELECT * FROM %s WHERE GEO_INTERSECTS" + - "(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", TEST_INDEX_LOCATION)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String result = + explainQuery( + String.format( + "SELECT * FROM %s WHERE GEO_INTERSECTS" + + "(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", + TEST_INDEX_LOCATION)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void orderByOnNestedFieldTest() throws Exception { String result = - explainQuery(String.format("SELECT * FROM %s ORDER BY NESTED('message.info','message')", - TEST_INDEX_NESTED_TYPE)); - Assert.assertThat(result.replaceAll("\\s+", ""), - equalTo("{\"from\":0,\"size\":200,\"sort\":[{\"message.info\":" + - "{\"order\":\"asc\",\"nested\":{\"path\":\"message\"}}}]}")); + explainQuery( + String.format( + "SELECT * FROM %s 
ORDER BY NESTED('message.info','message')", + TEST_INDEX_NESTED_TYPE)); + Assert.assertThat( + result.replaceAll("\\s+", ""), + equalTo( + "{\"from\":0,\"size\":200,\"sort\":[{\"message.info\":" + + "{\"order\":\"asc\",\"nested\":{\"path\":\"message\"}}}]}")); } @Test public void multiMatchQuery() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/multi_match_query.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/multi_match_query.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); String result = - explainQuery(String.format("SELECT * FROM %s WHERE multimatch('query'='this is a test'," + - "'fields'='subject^3,message','analyzer'='standard','type'='best_fields','boost'=1.0," + - "'slop'=0,'tie_breaker'=0.3,'operator'='and')", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "SELECT * FROM %s WHERE multimatch('query'='this is a test'," + + "'fields'='subject^3,message','analyzer'='standard','type'='best_fields','boost'=1.0," + + "'slop'=0,'tie_breaker'=0.3,'operator'='and')", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test @@ -180,36 +218,49 @@ public void termsIncludeExcludeExplainTest() throws IOException { final String expected3 = "\"include\":{\"partition\":0,\"num_partitions\":20}"; String result = - explainQuery(queryPrefix + " terms('field'='correspond_brand_name','size'='10'," + - "'alias'='correspond_brand_name','include'='\\\".*sport.*\\\"','exclude'='\\\"water_.*\\\"')"); + explainQuery( + queryPrefix + + " terms('field'='correspond_brand_name','size'='10'," + + "'alias'='correspond_brand_name','include'='\\\".*sport.*\\\"','exclude'='\\\"water_.*\\\"')"); Assert.assertThat(result, containsString(expected1)); - result = explainQuery(queryPrefix + "terms('field'='correspond_brand_name','size'='10'," + - "'alias'='correspond_brand_name','include'='[\\\"mazda\\\", \\\"honda\\\"]'," + - "'exclude'='[\\\"rover\\\", \\\"jensen\\\"]')"); + result = + explainQuery( + queryPrefix + + "terms('field'='correspond_brand_name','size'='10'," + + "'alias'='correspond_brand_name','include'='[\\\"mazda\\\", \\\"honda\\\"]'," + + "'exclude'='[\\\"rover\\\", \\\"jensen\\\"]')"); Assert.assertThat(result, containsString(expected2)); - result = explainQuery(queryPrefix + "terms('field'='correspond_brand_name','size'='10'," + - "'alias'='correspond_brand_name','include'='{\\\"partition\\\":0,\\\"num_partitions\\\":20}')"); + result = + explainQuery( + queryPrefix + + "terms('field'='correspond_brand_name','size'='10'," + + "'alias'='correspond_brand_name','include'='{\\\"partition\\\":0,\\\"num_partitions\\\":20}')"); Assert.assertThat(result, containsString(expected3)); } @Test public void explainNLJoin() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/nested_loop_join_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - String query = "SELECT /*! 
USE_NL*/ a.firstname ,a.lastname , a.gender ,d.dog_name FROM " + - TEST_INDEX_PEOPLE + "/people a JOIN " + TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname" + - " WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1"; + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/nested_loop_join_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + String query = + "SELECT /*! USE_NL*/ a.firstname ,a.lastname , a.gender ,d.dog_name FROM " + + TEST_INDEX_PEOPLE + + "/people a JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname" + + " WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1"; String result = explainQuery(query); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } public void testContentTypeOfExplainRequestShouldBeJson() throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java index e23753bbd2..81edb54556 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -14,13 +13,10 @@ import org.junit.rules.ExpectedException; import org.opensearch.client.ResponseException; -/** - * Tests to cover requests with "?format=csv" parameter - */ +/** Tests to cover requests with "?format=csv" parameter */ public class GetEndpointQueryIT extends SQLIntegTestCase { - @Rule - public ExpectedException rule = ExpectedException.none(); + @Rule public ExpectedException rule = ExpectedException.none(); @Override protected void init() throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java index f796010bbe..02c55d8eb8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -22,35 +21,23 @@ import org.junit.Assert; import org.junit.Test; -/** - * Test new hash join algorithm by comparison with old implementation. - */ +/** Test new hash join algorithm by comparison with old implementation. */ public class HashJoinIT extends SQLIntegTestCase { - /** - * Hint to use old join algorithm - */ + /** Hint to use old join algorithm */ private static final String USE_OLD_JOIN_ALGORITHM = "/*! USE_NL*/"; - /** - * Set limit to 100% to bypass circuit break check - */ + /** Set limit to 100% to bypass circuit break check */ private static final String BYPASS_CIRCUIT_BREAK = "/*! JOIN_CIRCUIT_BREAK_LIMIT(100)*/"; - /** - * Enable term filter optimization - */ + /** Enable term filter optimization */ private static final String ENABLE_TERMS_FILTER = "/*! 
HASH_WITH_TERMS_FILTER*/"; - /** - * Default page size is greater than block size - */ + /** Default page size is greater than block size */ private static final String PAGE_SIZE_GREATER_THAN_BLOCK_SIZE = "/*! JOIN_ALGORITHM_BLOCK_SIZE(5)*/"; - /** - * Page size is smaller than block size - */ + /** Page size is smaller than block size */ private static final String PAGE_SIZE_LESS_THAN_BLOCK_SIZE = "/*! JOIN_ALGORITHM_BLOCK_SIZE(5)*/ /*! JOIN_SCROLL_PAGE_SIZE(2)*/"; @@ -75,14 +62,16 @@ public void leftJoin() throws IOException { @Test public void innerJoinUnexpandedObjectField() { - String query = String.format(Locale.ROOT, - "SELECT " + - "a.id.serial, b.id.serial " + - "FROM %1$s AS a " + - "JOIN %1$s AS b " + - "ON a.id.serial = b.attributes.hardware.correlate_id " + - "WHERE b.attributes.hardware.platform = 'Linux' ", - TEST_INDEX_UNEXPANDED_OBJECT); + String query = + String.format( + Locale.ROOT, + "SELECT " + + "a.id.serial, b.id.serial " + + "FROM %1$s AS a " + + "JOIN %1$s AS b " + + "ON a.id.serial = b.attributes.hardware.correlate_id " + + "WHERE b.attributes.hardware.platform = 'Linux' ", + TEST_INDEX_UNEXPANDED_OBJECT); JSONObject response = executeJdbcRequest(query); verifyDataRows(response, rows(3, 1), rows(3, 3)); @@ -135,8 +124,8 @@ private void testJoin(final String join) throws IOException { // TODO: reduce the balance threshold to 10000 when the memory circuit breaker issue // (https://github.com/opendistro-for-elasticsearch/sql/issues/73) is fixed. final String querySuffixTemplate = - "a.firstname, a.lastname, b.city, b.state FROM %1$s a %2$s %1$s b " + - "ON b.age = a.age WHERE a.balance > 45000 AND b.age > 25 LIMIT 1000000"; + "a.firstname, a.lastname, b.city, b.state FROM %1$s a %2$s %1$s b " + + "ON b.age = a.age WHERE a.balance > 45000 AND b.age > 25 LIMIT 1000000"; final String querySuffix = String.format(Locale.ROOT, querySuffixTemplate, TEST_INDEX_ACCOUNT, join); @@ -152,10 +141,11 @@ private void testJoinWithObjectField(final String join, final String hint) throw // TODO: reduce the balance threshold to 10000 when the memory circuit breaker issue // (https://github.com/opendistro-for-elasticsearch/sql/issues/73) is fixed. 
- final String querySuffixTemplate = "c.name.firstname, c.name.lastname, f.hname, f.seat " + - "FROM %1$s c %2$s %1$s f ON f.gender.keyword = c.gender.keyword " + - "AND f.house.keyword = c.house.keyword " + - "WHERE c.gender = 'M' LIMIT 1000000"; + final String querySuffixTemplate = + "c.name.firstname, c.name.lastname, f.hname, f.seat " + + "FROM %1$s c %2$s %1$s f ON f.gender.keyword = c.gender.keyword " + + "AND f.house.keyword = c.house.keyword " + + "WHERE c.gender = 'M' LIMIT 1000000"; final String querySuffix = String.format(Locale.ROOT, querySuffixTemplate, TEST_INDEX_GAME_OF_THRONES, join); @@ -180,14 +170,16 @@ private void executeAndCompareOldAndNewJoins(final String oldQuery, final String Set idsOld = new HashSet<>(); - hitsOld.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; - idsOld.add(hit.getString("_id")); - }); - - hitsNew.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; - Assert.assertTrue(idsOld.contains(hit.getString("_id"))); - }); + hitsOld.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; + idsOld.add(hit.getString("_id")); + }); + + hitsNew.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; + Assert.assertTrue(idsOld.contains(hit.getString("_id"))); + }); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java index 34e6af02b4..3bd2195a89 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.arrayContaining; @@ -26,15 +25,19 @@ public class HavingIT extends SQLIntegTestCase { private static final String SELECT_FROM_WHERE_GROUP_BY = - "SELECT state, COUNT(*) cnt " + - "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " " + - "WHERE age = 30 " + - "GROUP BY state "; - - private static final Set> states1 = rowSet(1, Arrays.asList( - "AK", "AR", "CT", "DE", "HI", "IA", "IL", "IN", "LA", "MA", "MD", "MN", - "MO", "MT", "NC", "ND", "NE", "NH", "NJ", "NV", "SD", "VT", "WV", "WY" - )); + "SELECT state, COUNT(*) cnt " + + "FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " " + + "WHERE age = 30 " + + "GROUP BY state "; + + private static final Set> states1 = + rowSet( + 1, + Arrays.asList( + "AK", "AR", "CT", "DE", "HI", "IA", "IL", "IN", "LA", "MA", "MD", "MN", "MO", "MT", + "NC", "ND", "NE", "NH", "NJ", "NV", "SD", "VT", "WV", "WY")); private static final Set> states2 = rowSet(2, Arrays.asList("AZ", "DC", "KS", "ME")); private static final Set> states3 = @@ -47,118 +50,67 @@ protected void init() throws Exception { @Test public void equalsTo() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt = 2"), - resultSet( - states2 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt = 2"), resultSet(states2)); } @Test public void lessThanOrEqual() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <= 2"), - resultSet( - states1, - states2 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <= 2"), resultSet(states1, states2)); } @Test public void notEqualsTo() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <> 2"), - resultSet( - states1, - states3 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <> 2"), resultSet(states1, states3)); } @Test public void between() throws 
IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt BETWEEN 1 AND 2"), - resultSet( - states1, - states2 - ) - ); + resultSet(states1, states2)); } @Test public void notBetween() throws IOException { assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT BETWEEN 1 AND 2"), - resultSet( - states3 - ) - ); + query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT BETWEEN 1 AND 2"), resultSet(states3)); } @Test public void in() throws IOException { assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt IN (2, 3)"), - resultSet( - states2, - states3 - ) - ); + query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt IN (2, 3)"), resultSet(states2, states3)); } @Test public void notIn() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT IN (2, 3)"), - resultSet( - states1 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT IN (2, 3)"), resultSet(states1)); } @Test public void and() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt >= 1 AND cnt < 3"), - resultSet( - states1, - states2 - ) - ); + resultSet(states1, states2)); } @Test public void or() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt = 1 OR cnt = 3"), - resultSet( - states1, - states3 - ) - ); + resultSet(states1, states3)); } @Test public void not() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING NOT cnt >= 2"), - resultSet( - states1 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING NOT cnt >= 2"), resultSet(states1)); } @Test public void notAndOr() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING NOT (cnt > 0 AND cnt <= 2)"), - resultSet( - states3 - ) - ); + resultSet(states3)); } private Set query(String query) throws IOException { @@ -174,10 +126,8 @@ private Set getResult(JSONObject response, String aggName, String aggF Set result = new HashSet<>(); for (int i = 0; i < buckets.length(); i++) { JSONObject bucket = buckets.getJSONObject(i); - result.add(new Object[] { - bucket.get("key"), - ((JSONObject) bucket.get(aggFunc)).getLong("value") - }); + result.add( + new Object[] {bucket.get("key"), ((JSONObject) bucket.get(aggFunc)).getLong("value")}); } return result; @@ -185,15 +135,12 @@ private Set getResult(JSONObject response, String aggName, String aggF @SafeVarargs private final Matcher> resultSet(Set>... 
rowSets) { - return containsInAnyOrder(Arrays.stream(rowSets) - .flatMap(Collection::stream) - .collect(Collectors.toList())); + return containsInAnyOrder( + Arrays.stream(rowSets).flatMap(Collection::stream).collect(Collectors.toList())); } private static Set> rowSet(long count, List states) { - return states.stream() - .map(state -> row(state, count)) - .collect(Collectors.toSet()); + return states.stream().map(state -> row(state, count)).collect(Collectors.toSet()); } private static Matcher row(String state, long count) { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java index dcc90a9acf..b6c0942ba4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.CoreMatchers.anyOf; @@ -35,11 +34,11 @@ protected void init() throws Exception { @Test public void search() throws IOException { int ageToCompare = 25; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE age > %s " + - "LIMIT 1000\"}", TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"SELECT * FROM %s WHERE age > %s LIMIT 1000\"}", + TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int age = (int) hit.getSourceAsMap().get("age"); @@ -50,7 +49,7 @@ public void search() throws IOException { @Test public void searchWithFilterAndNoWhere() throws IOException { /* - * Human readable format of the request defined below: + * Human-readable format of the request defined below: * { * "query": "SELECT * FROM accounts LIMIT 1000", * "filter": { @@ -63,11 +62,14 @@ public void searchWithFilterAndNoWhere() throws IOException { * } */ int ageToCompare = 25; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "LIMIT 1000\",\"filter\":{\"range\":{\"age\":{\"gt\":%s}}}}", - TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "LIMIT 1000\",\"filter\":{\"range\":{\"age\":{\"gt\":%s}}}}", + TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int age = (int) hit.getSourceAsMap().get("age"); @@ -78,7 +80,7 @@ public void searchWithFilterAndNoWhere() throws IOException { @Test public void searchWithRangeFilter() throws IOException { /* - * Human readable format of the request defined below: + * Human-readable format of the request defined below: * { * "query": "SELECT * FROM accounts WHERE age > 25 LIMIT 1000", * "filter": { @@ -92,12 +94,15 @@ public void searchWithRangeFilter() throws IOException { */ int ageToCompare = 25; int balanceToCompare = 35000; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE age > %s " + - "LIMIT 1000\",\"filter\":{\"range\":{\"balance\":{\"lt\":%s}}}}", - TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare, balanceToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE age > %s " + + "LIMIT 1000\",\"filter\":{\"range\":{\"balance\":{\"lt\":%s}}}}", + TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare, 
balanceToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int age = (int) hit.getSourceAsMap().get("age"); @@ -109,12 +114,12 @@ public void searchWithRangeFilter() throws IOException { @Test /** - * Using TEST_INDEX_NESTED_TYPE here since term filter does not work properly on analyzed fields like text. - * The field 'someField' in TEST_INDEX_NESTED_TYPE is of type keyword. + * Using TEST_INDEX_NESTED_TYPE here since term filter does not work properly on analyzed fields + * like text. The field 'someField' in TEST_INDEX_NESTED_TYPE is of type keyword. */ public void searchWithTermFilter() throws IOException { /* - * Human readable format of the request defined below: + * Human-readable format of the request defined below: * { * "query": "SELECT * FROM nested_objects WHERE nested(comment.likes) < 3", * "filter": { @@ -126,12 +131,15 @@ public void searchWithTermFilter() throws IOException { */ int likesToCompare = 3; String fieldToCompare = "a"; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE nested(comment.likes) < %s\"," + - "\"filter\":{\"term\":{\"someField\":\"%s\"}}}", - TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, fieldToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE nested(comment.likes) < %s\"," + + "\"filter\":{\"term\":{\"someField\":\"%s\"}}}", + TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, fieldToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int likes = (int) ((Map) hit.getSourceAsMap().get("comment")).get("likes"); @@ -144,7 +152,7 @@ public void searchWithTermFilter() throws IOException { @Test public void searchWithNestedFilter() throws IOException { /* - * Human readable format of the request defined below: + * Human-readable format of the request defined below: * { * "query": "SELECT * FROM nested_objects WHERE nested(comment.likes) > 1", * "filter": { @@ -165,13 +173,16 @@ public void searchWithNestedFilter() throws IOException { */ int likesToCompare = 1; String dataToCompare = "aa"; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE nested(comment.likes) > %s\"," + - "\"filter\":{\"nested\":{\"path\":\"comment\"," + - "\"query\":{\"bool\":{\"must\":{\"term\":{\"comment.data\":\"%s\"}}}}}}}", - TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, dataToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE nested(comment.likes) > %s\"," + + "\"filter\":{\"nested\":{\"path\":\"comment\"," + + "\"query\":{\"bool\":{\"must\":{\"term\":{\"comment.data\":\"%s\"}}}}}}}", + TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, dataToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int likes = (int) ((Map) hit.getSourceAsMap().get("comment")).get("likes"); @@ -184,10 +195,11 @@ public void searchWithNestedFilter() throws IOException { private SearchHits query(String request) throws IOException { final JSONObject jsonObject = executeRequest(request); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return 
SearchResponse.fromXContent(parser).getHits(); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java index bd72877e1c..74acad4f52 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -27,9 +26,10 @@ protected void init() throws Exception { } public void testPercentilesQuery() { - JSONObject response = executeJdbcRequest( - "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + - "FROM opensearch-sql_test_index_people"); + JSONObject response = + executeJdbcRequest( + "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + + "FROM opensearch-sql_test_index_people"); assertThat(response.getJSONArray("datarows").length(), equalTo(1)); @@ -47,9 +47,10 @@ public void testSlowQuery() throws IOException { // set slow log threshold = 0s updateClusterSettings(new ClusterSetting(PERSISTENT, "plugins.sql.slowlog", "0")); - JSONObject response = executeJdbcRequest( - "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + - "FROM opensearch-sql_test_index_people"); + JSONObject response = + executeJdbcRequest( + "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + + "FROM opensearch-sql_test_index_people"); assertThat(response.getJSONArray("datarows").length(), equalTo(1)); JSONObject percentileRow = (JSONObject) response.query("/datarows/0/0"); @@ -61,42 +62,39 @@ public void testSlowQuery() throws IOException { wipeAllClusterSettings(); } - @Ignore("flaky test, trigger resource not enough exception. " - + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') can't be pushed down ") + @Ignore( + "flaky test, trigger resource not enough exception. " + + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') can't be pushed down ") public void testDateTimeInQuery() { - JSONObject response = executeJdbcRequest( - "SELECT date_format(insert_time, 'dd-MM-YYYY') " + - "FROM opensearch-sql_test_index_online " + - "ORDER BY date_format(insert_time, 'dd-MM-YYYY') " + - "LIMIT 1" - ); + JSONObject response = + executeJdbcRequest( + "SELECT date_format(insert_time, 'dd-MM-YYYY') " + + "FROM opensearch-sql_test_index_online " + + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') " + + "LIMIT 1"); assertThat( - response.getJSONArray("datarows") - .getJSONArray(0) - .getString(0), - equalTo("17-08-2014")); + response.getJSONArray("datarows").getJSONArray(0).getString(0), equalTo("17-08-2014")); } - @Ignore("flaky test, trigger resource not enough exception. " - + "ORDER BY all_client/10 can't be pushed down ") + @Ignore( + "flaky test, trigger resource not enough exception. 
" + + "ORDER BY all_client/10 can't be pushed down ") public void testDivisionInQuery() { - JSONObject response = executeJdbcRequest( - "SELECT all_client/10 from opensearch-sql_test_index_online ORDER BY all_client/10 desc limit 1"); + JSONObject response = + executeJdbcRequest( + "SELECT all_client/10 from opensearch-sql_test_index_online ORDER BY all_client/10 desc" + + " limit 1"); - assertThat( - response.getJSONArray("datarows") - .getJSONArray(0) - .getDouble(0), - equalTo(16827.0)); + assertThat(response.getJSONArray("datarows").getJSONArray(0).getDouble(0), equalTo(16827.0)); } public void testGroupByInQuery() { - JSONObject response = executeJdbcRequest( - "SELECT date_format(insert_time, 'YYYY-MM-dd'), COUNT(*) " + - "FROM opensearch-sql_test_index_online " + - "GROUP BY date_format(insert_time, 'YYYY-MM-dd')" - ); + JSONObject response = + executeJdbcRequest( + "SELECT date_format(insert_time, 'YYYY-MM-dd'), COUNT(*) " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date_format(insert_time, 'YYYY-MM-dd')"); assertThat(response.getJSONArray("schema").length(), equalTo(2)); assertThat(response.getJSONArray("datarows").length(), equalTo(8)); @@ -105,28 +103,31 @@ public void testGroupByInQuery() { @Test public void numberOperatorNameCaseInsensitiveTest() { assertSchemaContains( - executeQuery("SELECT ABS(age) FROM opensearch-sql_test_index_account " + - "WHERE age IS NOT NULL ORDER BY age LIMIT 5", "jdbc"), - "ABS(age)" - ); + executeQuery( + "SELECT ABS(age) FROM opensearch-sql_test_index_account " + + "WHERE age IS NOT NULL ORDER BY age LIMIT 5", + "jdbc"), + "ABS(age)"); } @Test public void trigFunctionNameCaseInsensitiveTest() { assertSchemaContains( - executeQuery("SELECT Cos(age) FROM opensearch-sql_test_index_account " + - "WHERE age is NOT NULL ORDER BY age LIMIT 5", "jdbc"), - "Cos(age)" - ); + executeQuery( + "SELECT Cos(age) FROM opensearch-sql_test_index_account " + + "WHERE age is NOT NULL ORDER BY age LIMIT 5", + "jdbc"), + "Cos(age)"); } @Test public void stringOperatorNameCaseInsensitiveTest() { assertSchemaContains( - executeQuery("SELECT SubStrinG(lastname, 0, 2) FROM opensearch-sql_test_index_account " + - "ORDER BY age LIMIT 5", "jdbc"), - "SubStrinG(lastname, 0, 2)" - ); + executeQuery( + "SELECT SubStrinG(lastname, 0, 2) FROM opensearch-sql_test_index_account " + + "ORDER BY age LIMIT 5", + "jdbc"), + "SubStrinG(lastname, 0, 2)"); } @Ignore("DATE_FORMAT function signature changed in new engine") @@ -134,45 +135,52 @@ public void stringOperatorNameCaseInsensitiveTest() { public void dateFunctionNameCaseInsensitiveTest() { assertTrue( executeQuery( - "SELECT DATE_FORMAT(insert_time, 'yyyy-MM-dd', 'UTC') FROM opensearch-sql_test_index_online " + - "WHERE date_FORMAT(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-01-01' " + - "GROUP BY DAte_format(insert_time, 'yyyy-MM-dd', 'UTC') " + - "ORDER BY date_forMAT(insert_time, 'yyyy-MM-dd', 'UTC')", "jdbc").equalsIgnoreCase( - executeQuery( - "SELECT date_format(insert_time, 'yyyy-MM-dd', 'UTC') FROM opensearch-sql_test_index_online " + - "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-01-01' " + - "GROUP BY date_format(insert_time, 'yyyy-MM-dd', 'UTC') " + - "ORDER BY date_format(insert_time, 'yyyy-MM-dd', 'UTC')", "jdbc") - ) - ); + "SELECT DATE_FORMAT(insert_time, 'yyyy-MM-dd', 'UTC') FROM" + + " opensearch-sql_test_index_online WHERE date_FORMAT(insert_time," + + " 'yyyy-MM-dd', 'UTC') > '2014-01-01' GROUP BY DAte_format(insert_time," + + " 'yyyy-MM-dd', 'UTC') ORDER BY date_forMAT(insert_time, 
'yyyy-MM-dd'," + + " 'UTC')", + "jdbc") + .equalsIgnoreCase( + executeQuery( + "SELECT date_format(insert_time, 'yyyy-MM-dd', 'UTC') FROM" + + " opensearch-sql_test_index_online WHERE date_format(insert_time," + + " 'yyyy-MM-dd', 'UTC') > '2014-01-01' GROUP BY date_format(insert_time," + + " 'yyyy-MM-dd', 'UTC') ORDER BY date_format(insert_time, 'yyyy-MM-dd'," + + " 'UTC')", + "jdbc"))); } @Test public void ipTypeShouldPassJdbcFormatter() { assertThat( - executeQuery("SELECT host AS hostIP FROM " + TestsConstants.TEST_INDEX_WEBLOG - + " ORDER BY hostIP", "jdbc"), - containsString("\"type\": \"ip\"") - ); + executeQuery( + "SELECT host AS hostIP FROM " + TestsConstants.TEST_INDEX_WEBLOG + " ORDER BY hostIP", + "jdbc"), + containsString("\"type\": \"ip\"")); } @Test public void functionWithoutAliasShouldHaveEntireFunctionAsNameInSchema() { assertThat( - executeQuery("SELECT substring(lastname, 1, 2) FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " ORDER BY substring(lastname, 1, 2)", "jdbc"), - containsString("\"name\": \"substring(lastname, 1, 2)\"") - ); + executeQuery( + "SELECT substring(lastname, 1, 2) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY substring(lastname, 1, 2)", + "jdbc"), + containsString("\"name\": \"substring(lastname, 1, 2)\"")); } @Ignore("Handled by v2 engine which returns 'name': 'substring(lastname, 1, 2)' instead") @Test public void functionWithAliasShouldHaveAliasAsNameInSchema() { assertThat( - executeQuery("SELECT substring(lastname, 1, 2) AS substring FROM " - + TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY substring", "jdbc"), - containsString("\"name\": \"substring\"") - ); + executeQuery( + "SELECT substring(lastname, 1, 2) AS substring FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY substring", + "jdbc"), + containsString("\"name\": \"substring\"")); } private void assertSchemaContains(String actualResponse, String expected) { @@ -183,7 +191,10 @@ private void assertSchemaContains(String actualResponse, String expected) { return; } } - Assert.fail("Expected field name [" + expected + "] is not found in response schema: " + - actualResponse); + Assert.fail( + "Expected field name [" + + expected + + "] is not found in response schema: " + + actualResponse); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java index 31c77fa7c0..75b2b45df6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -15,18 +14,15 @@ import org.junit.rules.ExpectedException; import org.opensearch.client.ResponseException; -/** - * Test cases for writing missing join table aliases. - */ +/** Test cases for writing missing join table aliases. 
*/ public class JoinAliasWriterRuleIT extends SQLIntegTestCase { - @Rule - public ExpectedException exception = ExpectedException.none(); + @Rule public ExpectedException exception = ExpectedException.none(); protected void init() throws Exception { - loadIndex(Index.ORDER); // opensearch-sql_test_index_order - loadIndex(Index.BANK); // opensearch-sql_test_index_bank - loadIndex(Index.BANK_TWO); // opensearch-sql_test_index_bank_two + loadIndex(Index.ORDER); // opensearch-sql_test_index_order + loadIndex(Index.BANK); // opensearch-sql_test_index_bank + loadIndex(Index.BANK_TWO); // opensearch-sql_test_index_bank_two } @Test @@ -38,12 +34,14 @@ public void noTableAliasNoCommonColumns() throws IOException { "INNER JOIN opensearch-sql_test_index_bank ", "ON name = firstname WHERE state = 'WA' OR id < 7"), query( - "SELECT opensearch-sql_test_index_order_0.id, opensearch-sql_test_index_bank_1.firstname ", + "SELECT opensearch-sql_test_index_order_0.id," + + " opensearch-sql_test_index_bank_1.firstname ", "FROM opensearch-sql_test_index_order opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_1 ", - "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname ", - "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname" + + " ", + "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR" + + " opensearch-sql_test_index_order_0.id < 7")); } @Test @@ -59,8 +57,7 @@ public void oneTableAliasNoCommonColumns() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_0 ", "ON a.name = opensearch-sql_test_index_bank_0.firstname ", - "WHERE opensearch-sql_test_index_bank_0.state = 'WA' OR a.id < 7") - ); + "WHERE opensearch-sql_test_index_bank_0.state = 'WA' OR a.id < 7")); } @Test @@ -76,8 +73,7 @@ public void bothTableAliasNoCommonColumns() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON a.name = b.firstname ", - "WHERE b.state = 'WA' OR a.id < 7 ") - ); + "WHERE b.state = 'WA' OR a.id < 7 ")); } @Test @@ -90,12 +86,14 @@ public void tableNamesWithTypeName() throws IOException { "INNER JOIN opensearch-sql_test_index_bank/account ", "ON name = firstname WHERE state = 'WA' OR id < 7"), query( - "SELECT opensearch-sql_test_index_order_0.id, opensearch-sql_test_index_bank_1.firstname ", + "SELECT opensearch-sql_test_index_order_0.id," + + " opensearch-sql_test_index_bank_1.firstname ", "FROM opensearch-sql_test_index_order/_doc opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank/_account opensearch-sql_test_index_bank_1 ", - "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname ", - "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname" + + " ", + "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR" + + " opensearch-sql_test_index_order_0.id < 7")); } @Ignore @@ -112,8 +110,7 @@ public void tableNamesWithTypeNameExplicitTableAlias() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON a.name = b.firstname ", - "WHERE b.state = 'WA' OR a.id < 7") - ); + "WHERE b.state = 'WA' 
OR a.id < 7")); } @Test @@ -129,8 +126,7 @@ public void actualTableNameAsAliasOnColumnFields() throws IOException { "FROM opensearch-sql_test_index_order opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON opensearch-sql_test_index_order_0.name = b.firstname ", - "WHERE b.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "WHERE b.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7")); } @Test @@ -143,12 +139,14 @@ public void actualTableNameAsAliasOnColumnFieldsTwo() throws IOException { "ON opensearch-sql_test_index_order.name = firstname ", "WHERE opensearch-sql_test_index_bank.state = 'WA' OR id < 7"), query( - "SELECT opensearch-sql_test_index_order_0.id, opensearch-sql_test_index_bank_1.firstname ", + "SELECT opensearch-sql_test_index_order_0.id," + + " opensearch-sql_test_index_bank_1.firstname ", "FROM opensearch-sql_test_index_order opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_1", - "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname ", - "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname" + + " ", + "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR" + + " opensearch-sql_test_index_order_0.id < 7")); } @Test @@ -164,44 +162,47 @@ public void columnsWithTableAliasNotAffected() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON a.name = b.firstname ", - "WHERE b.state = 'WA' OR a.id < 7") - ); + "WHERE b.state = 'WA' OR a.id < 7")); } @Test public void commonColumnWithoutTableAliasDifferentTables() throws IOException { exception.expect(ResponseException.class); exception.expectMessage("Field name [firstname] is ambiguous"); - String explain = explainQuery(query( - "SELECT firstname, lastname ", - "FROM opensearch-sql_test_index_bank ", - "LEFT JOIN opensearch-sql_test_index_bank_two ", - "ON firstname = lastname WHERE state = 'VA' " - )); + String explain = + explainQuery( + query( + "SELECT firstname, lastname ", + "FROM opensearch-sql_test_index_bank ", + "LEFT JOIN opensearch-sql_test_index_bank_two ", + "ON firstname = lastname WHERE state = 'VA' ")); } @Test public void sameTablesNoAliasAndNoAliasOnColumns() throws IOException { exception.expect(ResponseException.class); exception.expectMessage("Not unique table/alias: [opensearch-sql_test_index_bank]"); - String explain = explainQuery(query( - "SELECT firstname, lastname ", - "FROM opensearch-sql_test_index_bank ", - "LEFT JOIN opensearch-sql_test_index_bank ", - "ON firstname = lastname WHERE state = 'VA' " - )); + String explain = + explainQuery( + query( + "SELECT firstname, lastname ", + "FROM opensearch-sql_test_index_bank ", + "LEFT JOIN opensearch-sql_test_index_bank ", + "ON firstname = lastname WHERE state = 'VA' ")); } @Test public void sameTablesNoAliasWithTableNameAsAliasOnColumns() throws IOException { exception.expect(ResponseException.class); exception.expectMessage("Not unique table/alias: [opensearch-sql_test_index_bank]"); - String explain = explainQuery(query( - "SELECT opensearch-sql_test_index_bank.firstname", - "FROM opensearch-sql_test_index_bank ", - "JOIN opensearch-sql_test_index_bank ", - "ON opensearch-sql_test_index_bank.firstname = opensearch-sql_test_index_bank.lastname" - )); + String explain = + explainQuery( + query( + "SELECT 
opensearch-sql_test_index_bank.firstname", + "FROM opensearch-sql_test_index_bank ", + "JOIN opensearch-sql_test_index_bank ", + "ON opensearch-sql_test_index_bank.firstname =" + + " opensearch-sql_test_index_bank.lastname")); } @Test @@ -211,16 +212,12 @@ public void sameTablesWithExplicitAliasOnFirst() throws IOException { "SELECT opensearch-sql_test_index_bank.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank a", "JOIN opensearch-sql_test_index_bank ", - "ON opensearch-sql_test_index_bank.firstname = a.lastname " - ), + "ON opensearch-sql_test_index_bank.firstname = a.lastname "), query( "SELECT opensearch-sql_test_index_bank_0.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank a", "JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_0", - "ON opensearch-sql_test_index_bank_0.firstname = a.lastname " - ) - - ); + "ON opensearch-sql_test_index_bank_0.firstname = a.lastname ")); } @Test @@ -230,16 +227,12 @@ public void sameTablesWithExplicitAliasOnSecond() throws IOException { "SELECT opensearch-sql_test_index_bank.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank ", "JOIN opensearch-sql_test_index_bank a", - "ON opensearch-sql_test_index_bank.firstname = a.lastname " - ), + "ON opensearch-sql_test_index_bank.firstname = a.lastname "), query( "SELECT opensearch-sql_test_index_bank_0.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank opensearch-sql_test_index_bank_0", "JOIN opensearch-sql_test_index_bank a", - "ON opensearch-sql_test_index_bank_0.firstname = a.lastname " - ) - - ); + "ON opensearch-sql_test_index_bank_0.firstname = a.lastname ")); } private void sameExplain(String actualQuery, String expectedQuery) throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java index 46515be134..8019454b77 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.anyOf; @@ -62,10 +61,14 @@ public void joinParseCheckSelectedFieldsSplitNL() throws IOException { @Test public void joinParseWithHintsCheckSelectedFieldsSplitHASH() throws IOException { - String query = String.format(Locale.ROOT, "SELECT /*! HASH_WITH_TERMS_FILTER*/ " + - "a.firstname ,a.lastname, a.gender ,d.dog_name FROM %s a JOIN %s d " + - "ON d.holdersName = a.firstname WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", - TEST_INDEX_PEOPLE, TEST_INDEX_DOG); + String query = + String.format( + Locale.ROOT, + "SELECT /*! HASH_WITH_TERMS_FILTER*/ a.firstname ,a.lastname, a.gender ,d.dog_name FROM" + + " %s a JOIN %s d ON d.holdersName = a.firstname WHERE (a.age > 10 OR a.balance >" + + " 2000) AND d.age > 1", + TEST_INDEX_PEOPLE, + TEST_INDEX_DOG); JSONObject result = executeQuery(query); verifyJoinParseCheckSelectedFieldsSplitResult(result, false); @@ -75,9 +78,9 @@ public void joinParseWithHintsCheckSelectedFieldsSplitHASH() throws IOException // TODO: figure out why explain does not show results from first query in term filter and // fix either the test or the code. 
- //Arrays.asList("daenerys","nanette","virginia","aurelia","mcgee","hattie","elinor","burton").forEach(name -> { + // Arrays.asList("daenerys","nanette","virginia","aurelia","mcgee","hattie","elinor","burton").forEach(name -> { // Assert.assertThat(explanation, containsString(name)); - //}); + // }); } @Test @@ -95,8 +98,11 @@ public void joinWithNoWhereButWithConditionNL() throws IOException { @Test public void joinWithStarHASH() throws IOException { - String query = String.format(Locale.ROOT, "SELECT * FROM %1$s c " + - "JOIN %1$s h ON h.hname = c.house ", TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT * FROM %1$s c " + "JOIN %1$s h ON h.hname = c.house ", + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -269,9 +275,13 @@ public void testLeftJoinWithLimitNL() throws IOException { @Test public void hintMultiSearchCanRunFewTimesNL() throws IOException { - String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ /*! NL_MULTISEARCH_SIZE(2)*/ " + - "c.name.firstname,c.parents.father,h.hname,h.words FROM %1$s c " + - "JOIN %1$s h", TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT /*! USE_NL*/ /*! NL_MULTISEARCH_SIZE(2)*/ " + + "c.name.firstname,c.parents.father,h.hname,h.words FROM %1$s c " + + "JOIN %1$s h", + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -281,9 +291,13 @@ public void hintMultiSearchCanRunFewTimesNL() throws IOException { @Test public void joinWithGeoIntersectNL() throws IOException { - String query = String.format(Locale.ROOT, "SELECT p1.description,p2.description " + - "FROM %s p1 JOIN %s p2 ON GEO_INTERSECTS(p2.place,p1.place)", - TEST_INDEX_LOCATION, TEST_INDEX_LOCATION2); + String query = + String.format( + Locale.ROOT, + "SELECT p1.description,p2.description " + + "FROM %s p1 JOIN %s p2 ON GEO_INTERSECTS(p2.place,p1.place)", + TEST_INDEX_LOCATION, + TEST_INDEX_LOCATION2); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -299,11 +313,15 @@ public void joinWithGeoIntersectNL() throws IOException { @Test public void joinWithInQuery() throws IOException { - //TODO: Either change the ON condition field to keyword or create a different subquery - String query = String.format(Locale.ROOT, "SELECT c.gender,c.name.firstname,h.hname,h.words " + - "FROM %1$s c JOIN %1$s h ON h.hname = c.house " + - "WHERE c.name.firstname IN (SELECT holdersName FROM %2$s)", - TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_DOG); + // TODO: Either change the ON condition field to keyword or create a different subquery + String query = + String.format( + Locale.ROOT, + "SELECT c.gender,c.name.firstname,h.hname,h.words " + + "FROM %1$s c JOIN %1$s h ON h.hname = c.house " + + "WHERE c.name.firstname IN (SELECT holdersName FROM %2$s)", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_DOG); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -327,10 +345,14 @@ public void joinWithOrNL() throws IOException { @Test public void joinWithOrWithTermsFilterOpt() throws IOException { - String query = String.format(Locale.ROOT, "SELECT /*! HASH_WITH_TERMS_FILTER*/ " + - "d.dog_name,c.name.firstname FROM %s c " + - "JOIN %s d ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", - TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_DOG); + String query = + String.format( + Locale.ROOT, + "SELECT /*! 
HASH_WITH_TERMS_FILTER*/ " + + "d.dog_name,c.name.firstname FROM %s c " + + "JOIN %s d ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_DOG); executeQuery(query); String explanation = explainQuery(query); @@ -338,9 +360,8 @@ public void joinWithOrWithTermsFilterOpt() throws IOException { Assert.assertTrue(containsTerm(explanation, "holdersName")); Assert.assertTrue(containsTerm(explanation, "age")); - Arrays.asList("daenerys", "brandon", "eddard", "jaime").forEach( - name -> Assert.assertTrue(explanation.contains(name)) - ); + Arrays.asList("daenerys", "brandon", "eddard", "jaime") + .forEach(name -> Assert.assertTrue(explanation.contains(name))); } @Test @@ -394,26 +415,32 @@ public void leftJoinWithAllFromSecondTableNL() throws IOException { @Test public void joinParseCheckSelectedFieldsSplitNLConditionOrderEQ() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ " + - "a.firstname, a.lastname, a.gender, d.dog_name FROM %s a JOIN %s d " + - "ON a.firstname = d.holdersName WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", - TEST_INDEX_PEOPLE2, TEST_INDEX_DOG2); + final String query = + String.format( + Locale.ROOT, + "SELECT /*! USE_NL*/ a.firstname, a.lastname, a.gender, d.dog_name FROM %s a JOIN %s d" + + " ON a.firstname = d.holdersName WHERE (a.age > 10 OR a.balance > 2000) AND d.age" + + " > 1", + TEST_INDEX_PEOPLE2, + TEST_INDEX_DOG2); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(2)); - Map match1 = ImmutableMap.of( - "a.firstname", "Daenerys", - "a.lastname", "Targaryen", - "a.gender", "M", - "d.dog_name", "rex"); - Map match2 = ImmutableMap.of( - "a.firstname", "Hattie", - "a.lastname", "Bond", - "a.gender", "M", - "d.dog_name", "snoopy"); + Map match1 = + ImmutableMap.of( + "a.firstname", "Daenerys", + "a.lastname", "Targaryen", + "a.gender", "M", + "d.dog_name", "rex"); + Map match2 = + ImmutableMap.of( + "a.firstname", "Hattie", + "a.lastname", "Bond", + "a.gender", "M", + "d.dog_name", "snoopy"); Assert.assertTrue(hitsInclude(hits, match1)); Assert.assertTrue(hitsInclude(hits, match2)); @@ -422,21 +449,44 @@ public void joinParseCheckSelectedFieldsSplitNLConditionOrderEQ() throws IOExcep @Test public void joinParseCheckSelectedFieldsSplitNLConditionOrderGT() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ " + - "a.firstname, a.lastname, a.gender, d.firstname, d.age FROM " + - "%s a JOIN %s d on a.age < d.age " + - "WHERE (d.firstname = 'Lynn' OR d.firstname = 'Obrien') AND a.firstname = 'Mcgee'", - TEST_INDEX_PEOPLE, TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT /*! 
USE_NL*/ a.firstname, a.lastname, a.gender, d.firstname, d.age FROM %s a" + + " JOIN %s d on a.age < d.age WHERE (d.firstname = 'Lynn' OR d.firstname =" + + " 'Obrien') AND a.firstname = 'Mcgee'", + TEST_INDEX_PEOPLE, + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(2)); - Map oneMatch = ImmutableMap.of("a.firstname", "Mcgee", "a.lastname", "Mooney", - "a.gender", "M", "d.firstname", "Obrien", "d.age", 40); - Map secondMatch = ImmutableMap.of("a.firstname", "Mcgee", "a.lastname", "Mooney", - "a.gender", "M", "d.firstname", "Lynn", "d.age", 40); + Map oneMatch = + ImmutableMap.of( + "a.firstname", + "Mcgee", + "a.lastname", + "Mooney", + "a.gender", + "M", + "d.firstname", + "Obrien", + "d.age", + 40); + Map secondMatch = + ImmutableMap.of( + "a.firstname", + "Mcgee", + "a.lastname", + "Mooney", + "a.gender", + "M", + "d.firstname", + "Lynn", + "d.age", + 40); Assert.assertTrue(hitsInclude(hits, oneMatch)); Assert.assertTrue(hitsInclude(hits, secondMatch)); @@ -445,21 +495,44 @@ public void joinParseCheckSelectedFieldsSplitNLConditionOrderGT() throws IOExcep @Test public void joinParseCheckSelectedFieldsSplitNLConditionOrderLT() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ " + - "a.firstname, a.lastname, a.gender, d.firstname, d.age FROM " + - "%s a JOIN %s d on a.age > d.age " + - "WHERE (d.firstname = 'Sandoval' OR d.firstname = 'Hewitt') AND a.firstname = 'Fulton'", - TEST_INDEX_PEOPLE, TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT /*! USE_NL*/ a.firstname, a.lastname, a.gender, d.firstname, d.age FROM %s a" + + " JOIN %s d on a.age > d.age WHERE (d.firstname = 'Sandoval' OR d.firstname =" + + " 'Hewitt') AND a.firstname = 'Fulton'", + TEST_INDEX_PEOPLE, + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(2)); - Map oneMatch = ImmutableMap.of("a.firstname", "Fulton", "a.lastname", "Holt", - "a.gender", "F", "d.firstname", "Sandoval", "d.age", 22); - Map secondMatch = ImmutableMap.of("a.firstname", "Fulton", "a.lastname", "Holt", - "a.gender", "F", "d.firstname", "Hewitt", "d.age", 22); + Map oneMatch = + ImmutableMap.of( + "a.firstname", + "Fulton", + "a.lastname", + "Holt", + "a.gender", + "F", + "d.firstname", + "Sandoval", + "d.age", + 22); + Map secondMatch = + ImmutableMap.of( + "a.firstname", + "Fulton", + "a.lastname", + "Holt", + "a.gender", + "F", + "d.firstname", + "Hewitt", + "d.age", + 22); Assert.assertTrue(hitsInclude(hits, oneMatch)); Assert.assertTrue(hitsInclude(hits, secondMatch)); @@ -516,9 +589,12 @@ public void innerJoinNLWithNullInCondition3() throws IOException { private void joinWithAllFromSecondTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; - final String query = String.format(Locale.ROOT, "SELECT%1$s c.name.firstname, d.* " + - "FROM %2$s c JOIN %2$s d ON d.hname = c.house", - hint, TEST_INDEX_GAME_OF_THRONES); + final String query = + String.format( + Locale.ROOT, + "SELECT%1$s c.name.firstname, d.* " + "FROM %2$s c JOIN %2$s d ON d.hname = c.house", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -534,9 +610,12 @@ private void joinWithAllFromSecondTable(boolean useNestedLoops) throws IOExcepti private void joinWithAllFromFirstTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - final String query = String.format(Locale.ROOT, "SELECT%1$s c.name.firstname " + - "FROM %2$s d JOIN %2$s c ON c.house = d.hname", - hint, TEST_INDEX_GAME_OF_THRONES); + final String query = + String.format( + Locale.ROOT, + "SELECT%1$s c.name.firstname " + "FROM %2$s d JOIN %2$s c ON c.house = d.hname", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -552,30 +631,40 @@ private void joinWithAllFromFirstTable(boolean useNestedLoops) throws IOExceptio private void leftJoinWithAllFromSecondTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - final String query = String.format(Locale.ROOT, "SELECT%1$s c.name.firstname, d.* " + - "FROM %2$s c LEFT JOIN %2$s d ON d.hname = c.house", - hint, TEST_INDEX_GAME_OF_THRONES); + final String query = + String.format( + Locale.ROOT, + "SELECT%1$s c.name.firstname, d.* " + + "FROM %2$s c LEFT JOIN %2$s d ON d.hname = c.house", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(7)); - hits.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; + hits.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; - Assert.assertThat(hit.getJSONObject("_source").length(), - equalTo(hit.getString("_id").endsWith("0") ? 1 : 5)); - }); + Assert.assertThat( + hit.getJSONObject("_source").length(), + equalTo(hit.getString("_id").endsWith("0") ? 1 : 5)); + }); } private void joinParseCheckSelectedFieldsSplit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s a.firstname ,a.lastname,a.gender,d.dog_name " + - "FROM %s a JOIN %s d ON d.holdersName = a.firstname " + - "WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", hint, TEST_INDEX_PEOPLE, + String.format( + Locale.ROOT, + "SELECT%s a.firstname ,a.lastname,a.gender,d.dog_name " + + "FROM %s a JOIN %s d ON d.holdersName = a.firstname " + + "WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", + hint, + TEST_INDEX_PEOPLE, TEST_INDEX_DOG); JSONObject result = executeQuery(query); @@ -585,9 +674,13 @@ private void joinParseCheckSelectedFieldsSplit(boolean useNestedLoops) throws IO private void joinNoConditionButWithWhere(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s c.gender,h.hname,h.words FROM %2$s c " + - "JOIN %2$s h WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s c.gender,h.hname,h.words FROM %2$s c " + + "JOIN %2$s h WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -598,9 +691,12 @@ private void joinNoConditionAndNoWhere(boolean useNestedLoops) throws IOExceptio final String hint = useNestedLoops ? USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + - "FROM %2$s c JOIN %2$s h", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + + "FROM %2$s c JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -610,17 +706,21 @@ private void joinNoConditionAndNoWhere(boolean useNestedLoops) throws IOExceptio private void joinWithNoWhereButWithCondition(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s c.gender,h.hname,h.words " + - "FROM %2$s c JOIN %2$s h ON h.hname = c.house", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s c.gender,h.hname,h.words " + "FROM %2$s c JOIN %2$s h ON h.hname = c.house", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - Map someMatch = ImmutableMap.of( - "c.gender", "F", - "h.hname", "Targaryen", - "h.words", "fireAndBlood"); + Map someMatch = + ImmutableMap.of( + "c.gender", "F", + "h.hname", "Targaryen", + "h.words", "fireAndBlood"); if (useNestedLoops) { // TODO: should the NL result be different? @@ -631,24 +731,26 @@ private void joinWithNoWhereButWithCondition(boolean useNestedLoops) throws IOEx } } - private void verifyJoinParseCheckSelectedFieldsSplitResult(JSONObject result, - boolean useNestedLoops) { + private void verifyJoinParseCheckSelectedFieldsSplitResult( + JSONObject result, boolean useNestedLoops) { - Map match1 = ImmutableMap.of( - "a.firstname", "Daenerys", - "a.lastname", "Targaryen", - "a.gender", "M", - "d.dog_name", "rex"); - Map match2 = ImmutableMap.of( - "a.firstname", "Hattie", - "a.lastname", "Bond", - "a.gender", "M", - "d.dog_name", "snoopy"); + Map match1 = + ImmutableMap.of( + "a.firstname", "Daenerys", + "a.lastname", "Targaryen", + "a.gender", "M", + "d.dog_name", "rex"); + Map match2 = + ImmutableMap.of( + "a.firstname", "Hattie", + "a.lastname", "Bond", + "a.gender", "M", + "d.dog_name", "snoopy"); JSONArray hits = getHits(result); if (useNestedLoops) { - //TODO: change field mapping in ON condition to keyword or change query to get result + // TODO: change field mapping in ON condition to keyword or change query to get result // TODO: why does NL query return no results? Assert.assertThat(hits.length(), equalTo(0)); } else { @@ -662,9 +764,12 @@ private void joinNoConditionAndNoWhereWithTotalLimit(boolean useNestedLoops) thr final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words" + - " FROM %2$s c JOIN %2$s h LIMIT 9", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words" + + " FROM %2$s c JOIN %2$s h LIMIT 9", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -675,18 +780,22 @@ private void joinWithNestedFieldsOnReturn(boolean useNestedLoops) throws IOExcep final String hint = useNestedLoops ? USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + - "FROM %2$s c JOIN %2$s h ON h.hname = c.house " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + + "FROM %2$s c JOIN %2$s h ON h.hname = c.house " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "c.parents.father", "Aerys", - "h.hname", "Targaryen", - "h.words", "fireAndBlood"); + final Map expectedMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "c.parents.father", "Aerys", + "h.hname", "Targaryen", + "h.words", "fireAndBlood"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(0)); } else { @@ -699,17 +808,21 @@ private void joinWithAllAliasOnReturn(boolean useNestedLoops) throws IOException final String hint = useNestedLoops ? USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname name,c.parents.father father," + - "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname name,c.parents.father father," + + "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "name", "Daenerys", - "father", "Aerys", - "house", "Targaryen"); + final Map expectedMatch = + ImmutableMap.of( + "name", "Daenerys", + "father", "Aerys", + "house", "Targaryen"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(0)); @@ -723,20 +836,24 @@ private void joinWithSomeAliasOnReturn(boolean useNestedLoops) throws IOExceptio final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname ,c.parents.father father, " + - "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname ,c.parents.father father, " + + "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "father", "Aerys", - "house", "Targaryen"); + final Map expectedMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "father", "Aerys", + "house", "Targaryen"); if (useNestedLoops) { - //TODO: Either change the ON condition field to keyword or create a different subquery + // TODO: Either change the ON condition field to keyword or create a different subquery Assert.assertThat(hits.length(), equalTo(0)); } else { Assert.assertThat(hits.length(), equalTo(1)); @@ -749,18 +866,22 @@ private void joinWithNestedFieldsOnComparisonAndOnReturn(boolean useNestedLoops) final String hint = useNestedLoops ? USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father, h.hname,h.words " + - " FROM %2$s c JOIN %2$s h ON h.hname = c.name.lastname " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father, h.hname,h.words " + + " FROM %2$s c JOIN %2$s h ON h.hname = c.name.lastname " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "c.parents.father", "Aerys", - "h.hname", "Targaryen", - "h.words", "fireAndBlood"); + final Map expectedMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "c.parents.father", "Aerys", + "h.hname", "Targaryen", + "h.words", "fireAndBlood"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(0)); @@ -773,10 +894,12 @@ private void joinWithNestedFieldsOnComparisonAndOnReturn(boolean useNestedLoops) private void testLeftJoin(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format("SELECT%s c.name.firstname, f.name.firstname,f.name.lastname " + - "FROM %2$s c LEFT JOIN %2$s f " + - "ON f.name.firstname = c.parents.father", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + "SELECT%s c.name.firstname, f.name.firstname,f.name.lastname " + + "FROM %2$s c LEFT JOIN %2$s f " + + "ON f.name.firstname = c.parents.father", + hint, TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -805,10 +928,14 @@ private void testLeftJoin(boolean useNestedLoops) throws IOException { private void hintLimits_firstLimitSecondNull(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! 
JOIN_TABLES_LIMIT(2,null) */ " + - "c.name.firstname,c.parents.father, h.hname,h.words " + - "FROM %2$s c JOIN %2$s h", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! JOIN_TABLES_LIMIT(2,null) */ " + + "c.name.firstname,c.parents.father, h.hname,h.words " + + "FROM %2$s c JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -818,9 +945,14 @@ private void hintLimits_firstLimitSecondNull(boolean useNestedLoops) throws IOEx private void hintLimits_firstLimitSecondLimit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(2,2) */ " + - "c.name.firstname,c.parents.father, h.hname,h.words FROM %2$s c " + - "JOIN %2$s h", hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! JOIN_TABLES_LIMIT(2,2) */ " + + "c.name.firstname,c.parents.father, h.hname,h.words FROM %2$s c " + + "JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -830,10 +962,14 @@ private void hintLimits_firstLimitSecondLimit(boolean useNestedLoops) throws IOE private void hintLimits_firstLimitSecondLimitOnlyOne(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(3,1) */ " + - "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s h " + - "JOIN %2$s c ON c.name.lastname = h.hname", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! JOIN_TABLES_LIMIT(3,1) */ " + + "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s h " + + "JOIN %2$s c ON c.name.lastname = h.hname", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -843,9 +979,14 @@ private void hintLimits_firstLimitSecondLimitOnlyOne(boolean useNestedLoops) thr private void hintLimits_firstNullSecondLimit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(null,2) */ " + - "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s c " + - "JOIN %2$s h", hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! JOIN_TABLES_LIMIT(null,2) */ " + + "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s c " + + "JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -855,10 +996,14 @@ private void hintLimits_firstNullSecondLimit(boolean useNestedLoops) throws IOEx private void testLeftJoinWithLimit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(3,null) */ " + - "c.name.firstname, f.name.firstname,f.name.lastname FROM %2$s c " + - "LEFT JOIN %2$s f ON f.name.firstname = c.parents.father", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! 
JOIN_TABLES_LIMIT(3,null) */ " + + "c.name.firstname, f.name.firstname,f.name.lastname FROM %2$s c " + + "LEFT JOIN %2$s f ON f.name.firstname = c.parents.father", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -868,20 +1013,27 @@ private void testLeftJoinWithLimit(boolean useNestedLoops) throws IOException { private void joinWithOr(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s d.dog_name,c.name.firstname " + - "FROM %s c JOIN %s d " + - "ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", - hint, TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_DOG); + String query = + String.format( + Locale.ROOT, + "SELECT%s d.dog_name,c.name.firstname " + + "FROM %s c JOIN %s d " + + "ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", + hint, + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_DOG); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map firstMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "d.dog_name", "rex"); - final Map secondMatch = ImmutableMap.of( - "c.name.firstname", "Brandon", - "d.dog_name", "snoopy"); + final Map firstMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "d.dog_name", "rex"); + final Map secondMatch = + ImmutableMap.of( + "c.name.firstname", "Brandon", + "d.dog_name", "snoopy"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(1)); @@ -896,10 +1048,14 @@ private void joinWithOr(boolean useNestedLoops) throws IOException { private void joinWithOrderFirstTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s c.name.firstname,d.words " + - "FROM %2$s c JOIN %2$s d ON d.hname = c.house " + - "ORDER BY c.name.firstname", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,d.words " + + "FROM %2$s c JOIN %2$s d ON d.hname = c.house " + + "ORDER BY c.name.firstname", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -912,35 +1068,42 @@ private void joinWithOrderFirstTable(boolean useNestedLoops) throws IOException String[] expectedNames = {"Brandon", "Daenerys", "Eddard", "Jaime"}; - IntStream.rangeClosed(0, 3).forEach(i -> { - String firstnamePath = String.format(Locale.ROOT, "/%d/_source/c.name.firstname", i); - Assert.assertThat(hits.query(firstnamePath), equalTo(expectedNames[i])); - }); + IntStream.rangeClosed(0, 3) + .forEach( + i -> { + String firstnamePath = + String.format(Locale.ROOT, "/%d/_source/c.name.firstname", i); + Assert.assertThat(hits.query(firstnamePath), equalTo(expectedNames[i])); + }); } } private boolean containsTerm(final String explainedQuery, final String termName) { return Pattern.compile( - Pattern.quote("\"terms\":{") - + ".*" - + Pattern.quote("\"" + termName + "\":[") - ) + Pattern.quote("\"terms\":{") + ".*" + Pattern.quote("\"" + termName + "\":[")) .matcher(explainedQuery.replaceAll("\\s+", "")) .find(); } - private void joinWithNullInCondition(boolean useNestedLoops, String left, - String oper1, String oper2, int expectedNum) + private void joinWithNullInCondition( + boolean useNestedLoops, String left, String oper1, String oper2, int expectedNum) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,c.hname," + - "f.name.firstname,f.house,f.hname FROM %s c " + - "%s JOIN %s f ON f.name.firstname = c.parents.father " + - "%s f.house = c.hname %s f.house = c.name.firstname", - hint, TEST_INDEX_GAME_OF_THRONES, left, TEST_INDEX_GAME_OF_THRONES, oper1, oper2); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,c.hname," + + "f.name.firstname,f.house,f.hname FROM %s c " + + "%s JOIN %s f ON f.name.firstname = c.parents.father " + + "%s f.house = c.hname %s f.house = c.name.firstname", + hint, + TEST_INDEX_GAME_OF_THRONES, + left, + TEST_INDEX_GAME_OF_THRONES, + oper1, + oper2); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -968,20 +1131,22 @@ private boolean hitsInclude(final JSONArray actualHits, Map expectedS return false; } - private void assertHitMatches(final JSONObject actualHit, - final Map expectedSourceValues) { + private void assertHitMatches( + final JSONObject actualHit, final Map expectedSourceValues) { final JSONObject src = actualHit.getJSONObject("_source"); Assert.assertThat(src.length(), equalTo(expectedSourceValues.size())); - src.keySet().forEach(key -> { - Assert.assertTrue(expectedSourceValues.containsKey(key)); - Object value = src.get(key); - Assert.assertThat(value, equalTo(expectedSourceValues.get(key))); - }); + src.keySet() + .forEach( + key -> { + Assert.assertTrue(expectedSourceValues.containsKey(key)); + Object value = src.get(key); + Assert.assertThat(value, equalTo(expectedSourceValues.get(key))); + }); } - private boolean hitMatches(final Map actualHit, - final Map expectedSourceValues) { + private boolean hitMatches( + final Map actualHit, final Map expectedSourceValues) { final Map src = uncheckedGetMap(actualHit.get("_source")); @@ -997,8 +1162,8 @@ private boolean hitMatches(final Map actualHit, Object actualValue = src.get(key); Object expectedValue = expectedSourceValues.get(key); - if ((actualValue == null && expectedValue != null) || - (actualValue != null && expectedValue == null)) { + if ((actualValue == null && expectedValue != null) + || (actualValue != null && expectedValue == null)) { return false; } else if (actualValue != null && !actualValue.equals(expectedValue)) { return false; diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java index b42819bdf7..fcf1edf3e0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.closeTo; @@ -32,9 +31,7 @@ protected void init() throws Exception { @Test public void lowerCaseFunctionCall() throws IOException { - SearchHit[] hits = query( - "SELECT abs(age - 100) AS abs" - ); + SearchHit[] hits = query("SELECT abs(age - 100) AS abs"); for (SearchHit hit : hits) { double abs = (double) getField(hit, "abs"); assertThat(abs, greaterThanOrEqualTo(0.0)); @@ -43,9 +40,7 @@ public void lowerCaseFunctionCall() throws IOException { @Test public void upperCaseFunctionCall() throws IOException { - SearchHit[] hits = query( - "SELECT ABS(age - 100) AS abs" - ); + SearchHit[] hits = query("SELECT ABS(age - 100) AS abs"); for (SearchHit hit : hits) { double abs = (double) getField(hit, "abs"); assertThat(abs, 
greaterThanOrEqualTo(0.0)); @@ -54,36 +49,28 @@ public void upperCaseFunctionCall() throws IOException { @Test public void eulersNumber() throws IOException { - SearchHit[] hits = query( - "SELECT E() AS e" - ); + SearchHit[] hits = query("SELECT E() AS e"); double e = (double) getField(hits[0], "e"); assertThat(e, equalTo(Math.E)); } @Test public void pi() throws IOException { - SearchHit[] hits = query( - "SELECT PI() AS pi" - ); + SearchHit[] hits = query("SELECT PI() AS pi"); double pi = (double) getField(hits[0], "pi"); assertThat(pi, equalTo(Math.PI)); } @Test public void expm1Function() throws IOException { - SearchHit[] hits = query( - "SELECT EXPM1(2) AS expm1" - ); + SearchHit[] hits = query("SELECT EXPM1(2) AS expm1"); double expm1 = (double) getField(hits[0], "expm1"); assertThat(expm1, equalTo(Math.expm1(2))); } @Test public void degreesFunction() throws IOException { - SearchHit[] hits = query( - "SELECT age, DEGREES(age) AS degrees" - ); + SearchHit[] hits = query("SELECT age, DEGREES(age) AS degrees"); for (SearchHit hit : hits) { int age = (int) getFieldFromSource(hit, "age"); double degrees = (double) getField(hit, "degrees"); @@ -93,9 +80,7 @@ public void degreesFunction() throws IOException { @Test public void radiansFunction() throws IOException { - SearchHit[] hits = query( - "SELECT age, RADIANS(age) as radians" - ); + SearchHit[] hits = query("SELECT age, RADIANS(age) as radians"); for (SearchHit hit : hits) { int age = (int) getFieldFromSource(hit, "age"); double radians = (double) getField(hit, "radians"); @@ -105,65 +90,54 @@ public void radiansFunction() throws IOException { @Test public void sin() throws IOException { - SearchHit[] hits = query( - "SELECT SIN(PI()) as sin" - ); + SearchHit[] hits = query("SELECT SIN(PI()) as sin"); double sin = (double) getField(hits[0], "sin"); assertThat(sin, equalTo(Math.sin(Math.PI))); } @Test public void asin() throws IOException { - SearchHit[] hits = query( - "SELECT ASIN(PI()) as asin" - ); + SearchHit[] hits = query("SELECT ASIN(PI()) as asin"); double asin = Double.valueOf((String) getField(hits[0], "asin")); assertThat(asin, equalTo(Math.asin(Math.PI))); } @Test public void sinh() throws IOException { - SearchHit[] hits = query( - "SELECT SINH(PI()) as sinh" - ); + SearchHit[] hits = query("SELECT SINH(PI()) as sinh"); double sinh = (double) getField(hits[0], "sinh"); assertThat(sinh, equalTo(Math.sinh(Math.PI))); } @Test public void power() throws IOException { - SearchHit[] hits = query( - "SELECT POWER(age, 2) AS power", - "WHERE (age IS NOT NULL) AND (balance IS NOT NULL) and (POWER(balance, 3) > 0)" - ); + SearchHit[] hits = + query( + "SELECT POWER(age, 2) AS power", + "WHERE (age IS NOT NULL) AND (balance IS NOT NULL) and (POWER(balance, 3) > 0)"); double power = (double) getField(hits[0], "power"); assertTrue(power >= 0); } @Test public void atan2() throws IOException { - SearchHit[] hits = query( - "SELECT ATAN2(age, age) AS atan2", - "WHERE (age IS NOT NULL) AND (ATAN2(age, age) > 0)" - ); + SearchHit[] hits = + query( + "SELECT ATAN2(age, age) AS atan2", "WHERE (age IS NOT NULL) AND (ATAN2(age, age) > 0)"); double atan2 = (double) getField(hits[0], "atan2"); assertThat(atan2, equalTo(Math.atan2(1, 1))); } @Test public void cot() throws IOException { - SearchHit[] hits = query( - "SELECT COT(PI()) AS cot" - ); + SearchHit[] hits = query("SELECT COT(PI()) AS cot"); double cot = (double) getField(hits[0], "cot"); assertThat(cot, closeTo(1 / Math.tan(Math.PI), 0.001)); } @Test public void sign() throws 
IOException { - SearchHit[] hits = query( - "SELECT SIGN(E()) AS sign" - ); + SearchHit[] hits = query("SELECT SIGN(E()) AS sign"); double sign = (double) getField(hits[0], "sign"); assertThat(sign, equalTo(Math.signum(Math.E))); } @@ -186,18 +160,18 @@ public void logWithTwoParams() throws IOException { public void logInAggregationShouldPass() { assertThat( executeQuery( - "SELECT LOG(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " WHERE age IS NOT NULL GROUP BY LOG(age) ORDER BY LOG(age)", "jdbc" - ), - containsString("\"type\": \"double\"") - ); + "SELECT LOG(age) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY LOG(age) ORDER BY LOG(age)", + "jdbc"), + containsString("\"type\": \"double\"")); assertThat( executeQuery( - "SELECT LOG(2, age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " WHERE age IS NOT NULL GROUP BY LOG(2, age) ORDER BY LOG(2, age)", "jdbc" - ), - containsString("\"type\": \"double\"") - ); + "SELECT LOG(2, age) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY LOG(2, age) ORDER BY LOG(2, age)", + "jdbc"), + containsString("\"type\": \"double\"")); } @Test @@ -218,11 +192,11 @@ public void ln() throws IOException { public void lnInAggregationShouldPass() { assertThat( executeQuery( - "SELECT LN(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " WHERE age IS NOT NULL GROUP BY LN(age) ORDER BY LN(age)", "jdbc" - ), - containsString("\"type\": \"double\"") - ); + "SELECT LN(age) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY LN(age) ORDER BY LN(age)", + "jdbc"), + containsString("\"type\": \"double\"")); } @Test @@ -238,10 +212,11 @@ private SearchHit[] query(String select, String... statements) throws IOExceptio final String response = executeQueryWithStringOutput(select + " " + FROM + " " + String.join(" ", statements)); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(response)); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(response)); return SearchResponse.fromXContent(parser).getHits().getHits(); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java index 9f0fca68d5..3accb2bb17 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -26,8 +25,10 @@ import org.opensearch.client.Request; import org.opensearch.sql.legacy.utils.StringUtils; - /** + * + * + *
  * The following are tests for SHOW/DESCRIBE query support under Pretty Format Response protocol using JDBC format.
  * <p>
* Unlike SELECT queries, the JDBC format response of SHOW and DESCRIBE queries has determined "schema" fields. @@ -182,6 +183,7 @@ * "type": "keyword" * } * ] + * </pre>
*/ public class MetaDataQueriesIT extends SQLIntegTestCase { @@ -294,29 +296,27 @@ public void describeSingleIndex() throws IOException { @Ignore("Breaking change, the new engine will return alias instead of index name") @Test public void showSingleIndexAlias() throws IOException { - client().performRequest(new Request("PUT", - TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); + client().performRequest(new Request("PUT", TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); JSONObject expected = executeQuery("SHOW TABLES LIKE " + TestsConstants.TEST_INDEX_ACCOUNT); JSONObject actual = executeQuery("SHOW TABLES LIKE acc"); assertThat(getDataRows(actual).length(), equalTo(1)); - assertTrue(StringUtils.format("Expected: %s, actual: %s", expected, actual), - expected.similar(actual)); + assertTrue( + StringUtils.format("Expected: %s, actual: %s", expected, actual), expected.similar(actual)); } @Ignore("Breaking change, the new engine will return alias instead of index name") @Test public void describeSingleIndexAlias() throws IOException { - client().performRequest(new Request("PUT", - TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); + client().performRequest(new Request("PUT", TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); JSONObject expected = executeQuery("DESCRIBE TABLES LIKE " + TestsConstants.TEST_INDEX_ACCOUNT); JSONObject actual = executeQuery("DESCRIBE TABLES LIKE acc"); assertThat(getDataRows(actual).length(), greaterThan(0)); - assertTrue(StringUtils.format("Expected: %s, actual: %s", expected, actual), - expected.similar(actual)); + assertTrue( + StringUtils.format("Expected: %s, actual: %s", expected, actual), expected.similar(actual)); } @Test @@ -355,7 +355,8 @@ public void describeSingleIndexWithObjectFieldShouldPass() throws IOException { assertThat(dataRows.length(), greaterThan(0)); assertThat(dataRows.getJSONArray(0).length(), equalTo(DESCRIBE_FIELD_LENGTH)); - verifySome(dataRows, + verifySome( + dataRows, describeRow(TEST_INDEX_GAME_OF_THRONES, "nickname", "text"), describeRow(TEST_INDEX_GAME_OF_THRONES, "name", "object"), describeRow(TEST_INDEX_GAME_OF_THRONES, "name.firstname", "text"), @@ -402,8 +403,10 @@ public void describeWildcardIndex() throws IOException { @Test public void describeWildcardColumn() throws IOException { - JSONObject response = executeQuery(String.format("DESCRIBE TABLES LIKE %s COLUMNS LIKE %%name", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + "DESCRIBE TABLES LIKE %s COLUMNS LIKE %%name", TestsConstants.TEST_INDEX_ACCOUNT)); String pattern = ".*name"; JSONArray dataRows = getDataRows(response); @@ -418,8 +421,10 @@ public void describeWildcardColumn() throws IOException { @Test public void describeSingleCharacterWildcard() throws IOException { - JSONObject response = executeQuery(String.format("DESCRIBE TABLES LIKE %s COLUMNS LIKE %%na_e", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + "DESCRIBE TABLES LIKE %s COLUMNS LIKE %%na_e", TestsConstants.TEST_INDEX_ACCOUNT)); String pattern = ".*na.e"; JSONArray dataRows = getDataRows(response); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java index 027228a92b..7589304af0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - 
package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.both; @@ -28,26 +27,35 @@ protected void init() throws Exception { } /** + * + * + * <pre>
    * query
    * "query" : {
    *   query_string" : {
    *     "query" : "address:880 Holmes Lane"
    *   }
    * }
+   * </pre>
* * @throws IOException */ @Test public void queryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s where query('address:880 Holmes Lane') limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("query_string\\\":{\\\"query\\\":\\\"address:880 Holmes Lane")); - + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where query('address:880 Holmes Lane') limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("query_string\\\":{\\\"query\\\":\\\"address:880 Holmes Lane")); } /** + * + * + *
    * matchQuery
    * "query" : {
    *   "match" : {
@@ -57,19 +65,27 @@ public void queryTest() throws IOException {
    *     }
    *   }
    * }
+   * </pre>
* * @throws IOException */ @Test public void matchQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s where address= matchQuery('880 Holmes Lane') limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= matchQuery('880 Holmes Lane') limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("{\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"880 Holmes Lane\\\"")); } /** + * + * + *
    * matchQuery
    * {
    *   "query": {
@@ -109,45 +125,67 @@ public void matchQueryTest() throws IOException {
    *     }
    *   }
    * }
+   * </pre>
* * @throws IOException */ @Test - @Ignore("score query no longer maps to constant_score in the V2 engine - @see org.opensearch.sql.sql.ScoreQueryIT") + @Ignore( + "score query no longer maps to constant_score in the V2 engine - @see" + + " org.opensearch.sql.sql.ScoreQueryIT") public void scoreQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where score(matchQuery(address, 'Lane'),100) " + - "or score(matchQuery(address,'Street'),0.5) order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - both(containsString("{\"constant_score\":" + - "{\"filter\":{\"match\":{\"address\":{\"query\":\"Lane\"")).and( - containsString("{\"constant_score\":" + - "{\"filter\":{\"match\":{\"address\":{\"query\":\"Street\""))); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s " + + "where score(matchQuery(address, 'Lane'),100) " + + "or score(matchQuery(address,'Street'),0.5) order by _score desc limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + both(containsString( + "{\"constant_score\":" + "{\"filter\":{\"match\":{\"address\":{\"query\":\"Lane\"")) + .and( + containsString( + "{\"constant_score\":" + + "{\"filter\":{\"match\":{\"address\":{\"query\":\"Street\""))); } @Test public void regexpQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE address=REGEXP_QUERY('.*')", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"bool\":{\"must\":[{\"regexp\":" - + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE address=REGEXP_QUERY('.*')", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + containsString( + "{\"bool\":{\"must\":[{\"regexp\":" + + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); } @Test public void negativeRegexpQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE NOT(address=REGEXP_QUERY('.*'))", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"bool\":{\"must_not\":[{\"regexp\":" - + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT(address=REGEXP_QUERY('.*'))", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + containsString( + "{\"bool\":{\"must_not\":[{\"regexp\":" + + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); } /** + * + * + *
    * wildcardQuery
    * l*e means leae ltae ...
    * "wildcard": {
@@ -155,35 +193,47 @@ public void negativeRegexpQueryTest() throws IOException {
    *     "wildcard" : "l*e"
    *   }
    * }
+   * </pre>
* * @throws IOException */ @Test public void wildcardQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s where address= wildcardQuery('l*e') order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"wildcard\":{\"address\":{\"wildcard\":\"l*e\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= wildcardQuery('l*e') order by _score desc" + + " limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat(result, containsString("{\"wildcard\":{\"address\":{\"wildcard\":\"l*e\"")); } /** + * + * + *
    * matchPhraseQuery
    * "address" : {
    *   "query" : "671 Bristol Street",
    *   "type" : "phrase"
    * }
+   * </pre>
* * @throws IOException */ @Test - @Ignore("score query no longer handled by legacy engine - @see org.opensearch.sql.sql.ScoreQueryIT") + @Ignore( + "score query no longer handled by legacy engine - @see org.opensearch.sql.sql.ScoreQueryIT") public void matchPhraseQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where address= matchPhrase('671 Bristol Street') order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"match_phrase\":{\"address\":{\"query\":\"671 Bristol Street\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= matchPhrase('671 Bristol Street') order by" + + " _score desc limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("{\"match_phrase\":{\"address\":{\"query\":\"671 Bristol Street\"")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java index 3eeac66b97..238d3aeaff 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -47,9 +46,7 @@ private void multiQueries(int n) throws IOException { } private Request makeStatRequest() { - return new Request( - "GET", STATS_API_ENDPOINT - ); + return new Request("GET", STATS_API_ENDPOINT); } private String executeStatRequest(final Request request) throws IOException { @@ -69,5 +66,4 @@ private String executeStatRequest(final Request request) throws IOException { return sb.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java index d8d2b8875a..84750f8a27 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -34,15 +33,17 @@ protected void init() throws Exception { @Test public void unionAllSameRequestOnlyOneRecordTwice() throws IOException { - String query = String.format("SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Amber' " + - "LIMIT 1 " + - "UNION ALL " + - "SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Amber'", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT); + String query = + String.format( + "SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Amber' " + + "LIMIT 1 " + + "UNION ALL " + + "SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Amber'", + TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); JSONArray hits = getHits(response); @@ -58,10 +59,12 @@ public void unionAllSameRequestOnlyOneRecordTwice() throws IOException { @Test public void unionAllOnlyOneRecordEachWithAlias() throws IOException { - String query = String.format("SELECT firstname FROM %s WHERE firstname = 'Amber' " + - "UNION ALL " + - "SELECT dog_name as firstname FROM %s WHERE dog_name = 'rex'", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_DOG); + String query = + String.format( + 
"SELECT firstname FROM %s WHERE firstname = 'Amber' " + + "UNION ALL " + + "SELECT dog_name as firstname FROM %s WHERE dog_name = 'rex'", + TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_DOG); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(2)); @@ -80,12 +83,14 @@ public void unionAllOnlyOneRecordEachWithAlias() throws IOException { @Test public void unionAllOnlyOneRecordEachWithComplexAlias() throws IOException { - String query = String.format("SELECT firstname FROM %s WHERE firstname = 'Amber' " + - "UNION ALL " + - "SELECT name.firstname as firstname " + - "FROM %s " + - "WHERE name.firstname = 'daenerys'", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + "SELECT firstname FROM %s WHERE firstname = 'Amber' " + + "UNION ALL " + + "SELECT name.firstname as firstname " + + "FROM %s " + + "WHERE name.firstname = 'daenerys'", + TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_GAME_OF_THRONES); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(2)); @@ -144,10 +149,12 @@ public void minusCMinusDTwoFieldsNoAliasWithScrolling() throws IOException { @Test public void minusCMinusDTwoFieldsAliasOnBothSecondTableFields() throws IOException { - String query = String.format("SELECT pk, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT myId as pk, myLetter as letter FROM %s WHERE system_name = 'E'", - TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT pk, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT myId as pk, myLetter as letter FROM %s WHERE system_name = 'E'", + TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -174,10 +181,12 @@ public void minusCMinusDTwoFieldsAliasOnBothTablesWithScrolling() throws IOExcep @Test public void minusCMinusCTwoFieldsOneAlias() throws IOException { - String query = String.format("SELECT pk as myId, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT pk as myId, letter FROM %s WHERE system_name = 'C'", - TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT pk as myId, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT pk as myId, letter FROM %s WHERE system_name = 'C'", + TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(0)); @@ -185,10 +194,12 @@ public void minusCMinusCTwoFieldsOneAlias() throws IOException { @Test public void minusCMinusTNonExistentFieldTwoFields() throws IOException { - String query = String.format("SELECT pk, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT pk, letter FROM %s WHERE system_name = 'T' ", - TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT pk, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT pk, letter FROM %s WHERE system_name = 'T' ", + TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(3)); @@ -229,20 +240,24 @@ public void minusTMinusCNonExistentFieldFirstQueryWithScrollingAndOptimization() } private void innerMinusAMinusANoAlias(String hint) throws 
IOException { - String query = String.format("SELECT %s pk FROM %s WHERE system_name = 'A' " + - "MINUS " + - "SELECT pk FROM %s WHERE system_name = 'A'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk FROM %s WHERE system_name = 'A' " + + "MINUS " + + "SELECT pk FROM %s WHERE system_name = 'A'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(0)); } private void innerMinusAMinusBNoAlias(String hint) throws IOException { - String query = String.format("SELECT %s pk FROM %s WHERE system_name = 'A' " + - "MINUS " + - "SELECT pk FROM %s WHERE system_name = 'B'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk FROM %s WHERE system_name = 'A' " + + "MINUS " + + "SELECT pk FROM %s WHERE system_name = 'B'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -255,10 +270,12 @@ private void innerMinusAMinusBNoAlias(String hint) throws IOException { } private void innerMinusCMinusDTwoFieldsNoAlias(String hint) throws IOException { - String query = String.format("SELECT %s pk, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT pk, letter FROM %s WHERE system_name = 'D'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT pk, letter FROM %s WHERE system_name = 'D'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -274,10 +291,12 @@ private void innerMinusCMinusDTwoFieldsNoAlias(String hint) throws IOException { } private void innerMinusCMinusDTwoFieldsAliasOnBothTables(String hint) throws IOException { - String query = String.format("SELECT %s pk as myId, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT myId, myLetter as letter FROM %s WHERE system_name = 'E'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk as myId, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT myId, myLetter as letter FROM %s WHERE system_name = 'E'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -293,20 +312,24 @@ private void innerMinusCMinusDTwoFieldsAliasOnBothTables(String hint) throws IOE } private void innerMinusCMinusTNonExistentFieldOneField(String hint) throws IOException { - String query = String.format("SELECT %s letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT letter FROM %s WHERE system_name = 'T'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT letter FROM %s WHERE system_name = 'T'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(3)); } private void innerMinusTMinusCNonExistentFieldFirstQuery(String hint) throws IOException { - 
String query = String.format("SELECT %s letter FROM %s WHERE system_name = 'T' " + - "MINUS " + - "SELECT letter FROM %s WHERE system_name = 'C'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s letter FROM %s WHERE system_name = 'T' " + + "MINUS " + + "SELECT letter FROM %s WHERE system_name = 'C'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(0)); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java index 378fbda937..2108bf6867 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.allOf; @@ -40,6 +39,9 @@ import org.opensearch.search.SearchHit; /** + * + * + *
  * Integration test cases for both rewriting and projection logic.
  * <p>
* Test result: @@ -56,13 +58,13 @@ * 4) Subquery * 5) HAVING * 6) Verification for conditions mixed with regular and nested fields + * </pre>
*/ public class NestedFieldQueryIT extends SQLIntegTestCase { private static final String FROM = "FROM " + TestsConstants.TEST_INDEX_NESTED_TYPE + " n, n.message m"; - @Override protected void init() throws Exception { loadIndex(Index.NESTED); @@ -83,188 +85,71 @@ private void queryAll(String sql) throws IOException { assertThat( query(sql), hits( - hit( - myNum(1), - someField("b"), - innerHits("message", - hit( - author("e"), - info("a") - ) - ) - ), - hit( - myNum(2), - someField("a"), - innerHits("message", - hit( - author("f"), - info("b") - ) - ) - ), - hit( - myNum(3), - someField("a"), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ), + hit(myNum(1), someField("b"), innerHits("message", hit(author("e"), info("a")))), + hit(myNum(2), someField("a"), innerHits("message", hit(author("f"), info("b")))), + hit(myNum(3), someField("a"), innerHits("message", hit(author("g"), info("c")))), hit( myNum(4), someField("b"), - innerHits("message", - hit( - author("h"), - info("c") - ), - hit( - author("i"), - info("a") - ) - ) - ), + innerHits("message", hit(author("h"), info("c")), hit(author("i"), info("a")))), hit( myNum(new int[] {3, 4}), someField("a"), - innerHits("message", - hit( - author("zz"), - info("zz") - ) - ) - ) - ) - ); + innerHits("message", hit(author("zz"), info("zz")))))); } @Test public void singleCondition() throws IOException { assertThat( - query( - "SELECT myNum, m.author, m.info", - "WHERE m.info = 'c'" - ), + query("SELECT myNum, m.author, m.info", "WHERE m.info = 'c'"), hits( - hit( - myNum(3), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ), - hit( - myNum(4), - innerHits("message", - hit( - author("h"), - info("c") - ) - ) - ) - ) - ); + hit(myNum(3), innerHits("message", hit(author("g"), info("c")))), + hit(myNum(4), innerHits("message", hit(author("h"), info("c")))))); } @Test public void multipleConditionsOfNestedField() throws IOException { assertThat( - query( - "SELECT someField, m.author, m.info", - "WHERE m.info = 'c' AND m.author = 'h'" - ), - hits( - hit( - someField("b"), - innerHits("message", - hit( - author("h"), - info("c") - ) - ) - ) - ) - ); + query("SELECT someField, m.author, m.info", "WHERE m.info = 'c' AND m.author = 'h'"), + hits(hit(someField("b"), innerHits("message", hit(author("h"), info("c")))))); } @Test public void multipleConditionsOfNestedFieldNoMatch() throws IOException { assertThat( - query( - "SELECT someField, m.author, m.info", - "WHERE m.info = 'c' AND m.author = 'i'" - ), - hits() - ); + query("SELECT someField, m.author, m.info", "WHERE m.info = 'c' AND m.author = 'i'"), + hits()); } @Test public void multipleConditionsOfRegularAndNestedField() throws IOException { assertThat( - query( - "SELECT myNum, m.author, m.info", - "WHERE myNum = 3 AND m.info = 'c'" - ), - hits( - hit( - myNum(3), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ) - ) - ); + query("SELECT myNum, m.author, m.info", "WHERE myNum = 3 AND m.info = 'c'"), + hits(hit(myNum(3), innerHits("message", hit(author("g"), info("c")))))); } @Test public void multipleConditionsOfRegularOrNestedField() throws IOException { assertThat( - query( - "SELECT myNum, m.author, m.info", - "WHERE myNum = 2 OR m.info = 'c'" - ), + query("SELECT myNum, m.author, m.info", "WHERE myNum = 2 OR m.info = 'c'"), hits( - hit( - myNum(2) - ), // Note: no inner hit here because of no match in nested field - hit( - myNum(3), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ), - hit( - myNum(4), - 
innerHits("message", - hit( - author("h"), - info("c") - ) - ) - ) - ) - ); + hit(myNum(2)), // Note: no inner hit here because of no match in nested field + hit(myNum(3), innerHits("message", hit(author("g"), info("c")))), + hit(myNum(4), innerHits("message", hit(author("h"), info("c")))))); } @Test public void leftJoinSelectAll() throws IOException { - String sql = "SELECT * " + - "FROM opensearch-sql_test_index_employee_nested e " + - "LEFT JOIN e.projects p"; + String sql = + "SELECT * " + + "FROM opensearch-sql_test_index_employee_nested e " + + "LEFT JOIN e.projects p"; String explain = explainQuery(sql); - assertThat(explain, containsString("{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + - "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); + assertThat( + explain, + containsString( + "{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + + "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); assertThat(explain, containsString("\"_source\":{\"includes\":[\"projects.*\"")); @@ -274,42 +159,50 @@ public void leftJoinSelectAll() throws IOException { @Test public void leftJoinSpecificFields() throws IOException { - String sql = "SELECT e.name, p.name, p.started_year " + - "FROM opensearch-sql_test_index_employee_nested e " + - "LEFT JOIN e.projects p"; + String sql = + "SELECT e.name, p.name, p.started_year " + + "FROM opensearch-sql_test_index_employee_nested e " + + "LEFT JOIN e.projects p"; String explain = explainQuery(sql); - assertThat(explain, containsString("{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + - "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); + assertThat( + explain, + containsString( + "{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + + "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); assertThat(explain, containsString("\"_source\":{\"includes\":[\"name\"],")); - assertThat(explain, + assertThat( + explain, containsString("\"_source\":{\"includes\":[\"projects.name\",\"projects.started_year\"]")); JSONObject results = executeQuery(sql); Assert.assertThat(getTotalHits(results), equalTo(4)); } - @Ignore("Comma join in left join won't pass syntax check in new ANTLR parser. " - + "Ignore for now and require to change grammar too when we want to support this case.") + @Ignore( + "Comma join in left join won't pass syntax check in new ANTLR parser. 
" + + "Ignore for now and require to change grammar too when we want to support this case.") @Test public void leftJoinExceptionOnExtraNestedFields() throws IOException { - String sql = "SELECT * " + - "FROM opensearch-sql_test_index_employee_nested e " + - "LEFT JOIN e.projects p, e.comments c"; + String sql = + "SELECT * " + + "FROM opensearch-sql_test_index_employee_nested e " + + "LEFT JOIN e.projects p, e.comments c"; try { String explain = explainQuery(sql); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); - assertThat(entity, + assertThat( + entity, containsString("only single nested field is allowed as right table for LEFT JOIN")); assertThat(entity, containsString("\"type\":\"verification_exception\"")); } } - @Test public void aggregationWithoutGroupBy() throws IOException { String sql = "SELECT AVG(m.dayOfWeek) AS avgDay " + FROM; @@ -317,7 +210,9 @@ public void aggregationWithoutGroupBy() throws IOException { JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "message.dayOfWeek@NESTED"); - Assert.assertThat(((BigDecimal) aggregation.query("/avgDay/value")).doubleValue(), closeTo(3.166666666, 0.01)); + Assert.assertThat( + ((BigDecimal) aggregation.query("/avgDay/value")).doubleValue(), + closeTo(3.166666666, 0.01)); } @Test @@ -351,39 +246,36 @@ public void groupByRegularFieldAndSum() throws IOException { Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(msgInfoBuckets.length(), equalTo(2)); Assert.assertThat(msgInfoBuckets.query("/0/key"), equalTo("a")); - Assert.assertThat(((BigDecimal) msgInfoBuckets.query("/0/message.dayOfWeek@NESTED/sumDay/value")).doubleValue(), + Assert.assertThat( + ((BigDecimal) msgInfoBuckets.query("/0/message.dayOfWeek@NESTED/sumDay/value")) + .doubleValue(), closeTo(9.0, 0.01)); Assert.assertThat(msgInfoBuckets.query("/1/key"), equalTo("b")); - Assert.assertThat(((BigDecimal) msgInfoBuckets.query("/1/message.dayOfWeek@NESTED/sumDay/value")).doubleValue(), + Assert.assertThat( + ((BigDecimal) msgInfoBuckets.query("/1/message.dayOfWeek@NESTED/sumDay/value")) + .doubleValue(), closeTo(10.0, 0.01)); } @Test public void nestedFiledIsNotNull() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + - "WHERE p IS NOT NULL"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + + "WHERE p IS NOT NULL"; assertThat( executeQuery(sql), hitAll( kvString("/_source/name", Is.is("Bob Smith")), - kvString("/_source/name", Is.is("Jane Smith")) - ) - ); + kvString("/_source/name", Is.is("Jane Smith")))); } // Doesn't support: aggregate function other than COUNT() @SuppressWarnings("unused") public void groupByNestedFieldAndAvg() throws IOException { - query( - "SELECT m.info, AVG(m.dayOfWeek)", - "GROUP BY m.info" - ); - query( - "SELECT m.info, AVG(myNum)", - "GROUP BY m.info" - ); + query("SELECT m.info, AVG(m.dayOfWeek)", "GROUP BY m.info"); + query("SELECT m.info, AVG(myNum)", "GROUP BY m.info"); } @Test @@ -418,10 +310,11 @@ public void groupByNestedAndRegularField() throws IOException { @Test public void countAggWithoutWhere() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM 
opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -437,11 +330,12 @@ public void countAggWithoutWhere() throws IOException { @Test public void countAggWithWhereOnParent() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -457,11 +351,12 @@ public void countAggWithWhereOnParent() throws IOException { @Test public void countAggWithWhereOnNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -477,11 +372,12 @@ public void countAggWithWhereOnNested() throws IOException { @Test public void countAggWithWhereOnParentOrNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -497,11 +393,12 @@ public void countAggWithWhereOnParentOrNested() throws IOException { @Test public void countAggWithWhereOnParentAndNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -517,11 +414,12 @@ public void countAggWithWhereOnParentAndNested() throws IOException { @Test public void countAggWithWhereOnNestedAndNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 0"; + 
String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -537,11 +435,12 @@ public void countAggWithWhereOnNestedAndNested() throws IOException { @Test public void countAggWithWhereOnNestedOrNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -557,11 +456,12 @@ public void countAggWithWhereOnNestedOrNested() throws IOException { @Test public void countAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name, COUNT(p.started_year) as count " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING count > 0"; + String sql = + "SELECT e.name, COUNT(p.started_year) as count " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING count > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -581,10 +481,11 @@ public void countAggOnNestedInnerFieldWithoutWhere() throws IOException { @Test public void maxAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name, MAX(p.started_year) as max " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name"; + String sql = + "SELECT e.name, MAX(p.started_year) as max " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -594,20 +495,27 @@ public void maxAggOnNestedInnerFieldWithoutWhere() throws IOException { Assert.assertThat(bucket.length(), equalTo(2)); Assert.assertThat(bucket.query("/0/key"), equalTo("Bob Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/0/projects.started_year@NESTED/projects.started_year@FILTER/max/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/0/projects.started_year@NESTED/projects.started_year@FILTER/max/value")) + .doubleValue(), closeTo(2015.0, 0.01)); Assert.assertThat(bucket.query("/1/key"), equalTo("Jane Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/1/projects.started_year@NESTED/projects.started_year@FILTER/max/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/1/projects.started_year@NESTED/projects.started_year@FILTER/max/value")) + .doubleValue(), closeTo(2015.0, 0.01)); } @Test public void havingCountAggWithoutWhere() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String 
sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -623,11 +531,12 @@ public void havingCountAggWithoutWhere() throws IOException { @Test public void havingCountAggWithWhereOnParent() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -643,11 +552,12 @@ public void havingCountAggWithWhereOnParent() throws IOException { @Test public void havingCountAggWithWhereOnNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -663,11 +573,12 @@ public void havingCountAggWithWhereOnNested() throws IOException { @Test public void havingCountAggWithWhereOnParentOrNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -683,11 +594,12 @@ public void havingCountAggWithWhereOnParentOrNested() throws IOException { @Test public void havingCountAggWithWhereOnParentAndNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -703,11 +615,12 @@ public void havingCountAggWithWhereOnParentAndNested() throws IOException { @Test public void havingCountAggWithWhereOnNestedAndNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 0"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 
2000 AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -723,11 +636,12 @@ public void havingCountAggWithWhereOnNestedAndNested() throws IOException { @Test public void havingCountAggWithWhereOnNestedOrNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -743,11 +657,12 @@ public void havingCountAggWithWhereOnNestedOrNested() throws IOException { @Test public void havingCountAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p.started_year) > 0"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p.started_year) > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -767,11 +682,12 @@ public void havingCountAggOnNestedInnerFieldWithoutWhere() throws IOException { @Test public void havingMaxAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING MAX(p.started_year) > 1990"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING MAX(p.started_year) > 1990"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -781,22 +697,28 @@ public void havingMaxAggOnNestedInnerFieldWithoutWhere() throws IOException { Assert.assertThat(bucket.length(), equalTo(2)); Assert.assertThat(bucket.query("/0/key"), equalTo("Bob Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/0/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/0/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")) + .doubleValue(), closeTo(2015.0, 0.01)); Assert.assertThat(bucket.query("/1/key"), equalTo("Jane Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/1/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/1/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")) + .doubleValue(), closeTo(2015.0, 0.01)); } /*********************************************************** - Matchers for Non-Aggregation Testing + * Matchers for Non-Aggregation Testing ***********************************************************/ @SafeVarargs private final Matcher hits(Matcher... 
subMatchers) { - return featureValueOf("hits", arrayContainingInAnyOrder(subMatchers), - resp -> resp.getHits().getHits()); + return featureValueOf( + "hits", arrayContainingInAnyOrder(subMatchers), resp -> resp.getHits().getHits()); } @SafeVarargs @@ -834,8 +756,7 @@ public boolean matches(Object item) { } @Override - public void describeTo(Description description) { - } + public void describeTo(Description description) {} }; } @@ -860,16 +781,15 @@ private final Matcher innerHits(String path, Matcher... in return featureValueOf( "innerHits", arrayContainingInAnyOrder(innerHitMatchers), - hit -> hit.getInnerHits().get(path).getHits() - ); + hit -> hit.getInnerHits().get(path).getHits()); } /*********************************************************** - Matchers for Aggregation Testing + * Matchers for Aggregation Testing ***********************************************************/ - private FeatureMatcher featureValueOf(String name, Matcher subMatcher, - Function getter) { + private FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { return new FeatureMatcher(subMatcher, name, name) { @Override protected U featureValueOf(T actual) { @@ -879,7 +799,7 @@ protected U featureValueOf(T actual) { } /*********************************************************** - Query Utility to Fetch Response for SQL + * Query Utility to Fetch Response for SQL ***********************************************************/ private SearchResponse query(String select, String... statements) throws IOException { @@ -889,10 +809,11 @@ private SearchResponse query(String select, String... statements) throws IOExcep private SearchResponse execute(String sql) throws IOException { final JSONObject jsonObject = executeQuery(sql); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser); } @@ -904,5 +825,4 @@ private JSONObject getAggregation(final JSONObject queryResult, final String agg Assert.assertTrue(aggregations.has(aggregationName)); return aggregations.getJSONObject(aggregationName); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java index ce781123d6..3a2f48d497 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DEEP_NESTED; @@ -18,9 +17,8 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * Integration test for OpenSearch object field (and nested field). - * This class is focused on simple SELECT-FROM query to ensure right column - * number and value is returned. + * Integration test for OpenSearch object field (and nested field). This class is focused on simple + * SELECT-FROM query to ensure right column number and value is returned. 
*/ public class ObjectFieldSelectIT extends SQLIntegTestCase { @@ -36,33 +34,28 @@ public void testSelectObjectFieldItself() { verifySchema(response, schema("city", null, "object")); // Expect object field itself is returned in a single cell - verifyDataRows(response, - rows(new JSONObject( - "{\n" - + " \"name\": \"Seattle\",\n" - + " \"location\": {\"latitude\": 10.5}\n" - + "}") - ) - ); + verifyDataRows( + response, + rows( + new JSONObject( + "{\n" + + " \"name\": \"Seattle\",\n" + + " \"location\": {\"latitude\": 10.5}\n" + + "}"))); } @Test public void testSelectObjectInnerFields() { - JSONObject response = new JSONObject(query( - "SELECT city.location, city.location.latitude FROM %s")); + JSONObject response = + new JSONObject(query("SELECT city.location, city.location.latitude FROM %s")); - verifySchema(response, + verifySchema( + response, schema("city.location", null, "object"), - schema("city.location.latitude", null, "double") - ); + schema("city.location.latitude", null, "double")); // Expect inner regular or object field returned in its single cell - verifyDataRows(response, - rows( - new JSONObject("{\"latitude\": 10.5}"), - 10.5 - ) - ); + verifyDataRows(response, rows(new JSONObject("{\"latitude\": 10.5}"), 10.5)); } @Test @@ -72,15 +65,15 @@ public void testSelectNestedFieldItself() { verifySchema(response, schema("projects", null, "nested")); // Expect nested field itself is returned in a single cell - verifyDataRows(response, - rows(new JSONArray( - "[\n" - + " {\"name\": \"AWS Redshift Spectrum querying\"},\n" - + " {\"name\": \"AWS Redshift security\"},\n" - + " {\"name\": \"AWS Aurora security\"}\n" - + "]") - ) - ); + verifyDataRows( + response, + rows( + new JSONArray( + "[\n" + + " {\"name\": \"AWS Redshift Spectrum querying\"},\n" + + " {\"name\": \"AWS Redshift security\"},\n" + + " {\"name\": \"AWS Aurora security\"}\n" + + "]"))); } @Test @@ -100,10 +93,6 @@ public void testSelectObjectFieldOfArrayValuesInnerFields() { } private String query(String sql) { - return executeQuery( - StringUtils.format(sql, TEST_INDEX_DEEP_NESTED), - "jdbc" - ); + return executeQuery(StringUtils.format(sql, TEST_INDEX_DEEP_NESTED), "jdbc"); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java index 8976e09084..d73e3468d4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java @@ -3,11 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; -import static java.util.Collections.unmodifiableList; - import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -43,8 +40,9 @@ import org.opensearch.test.rest.OpenSearchRestTestCase; /** - * OpenSearch SQL integration test base class to support both security disabled and enabled OpenSearch cluster. - * Allows interaction with multiple external test clusters using OpenSearch's {@link RestClient}. + * OpenSearch SQL integration test base class to support both security disabled and enabled + * OpenSearch cluster. Allows interaction with multiple external test clusters using OpenSearch's + * {@link RestClient}. 
*/ public abstract class OpenSearchSQLRestTestCase extends OpenSearchRestTestCase { @@ -67,17 +65,20 @@ public abstract class OpenSearchSQLRestTestCase extends OpenSearchRestTestCase { + "}"; private static RestClient remoteClient; + /** - * A client for the running remote OpenSearch cluster configured to take test administrative actions - * like remove all indexes after the test completes + * A client for the running remote OpenSearch cluster configured to take test administrative + * actions like remove all indexes after the test completes */ private static RestClient remoteAdminClient; protected boolean isHttps() { - boolean isHttps = Optional.ofNullable(System.getProperty("https")) - .map("true"::equalsIgnoreCase).orElse(false); + boolean isHttps = + Optional.ofNullable(System.getProperty("https")) + .map("true"::equalsIgnoreCase) + .orElse(false); if (isHttps) { - //currently only external cluster is supported for security enabled testing + // currently only external cluster is supported for security enabled testing if (!Optional.ofNullable(System.getProperty("tests.rest.cluster")).isPresent()) { throw new RuntimeException( "external cluster url should be provided for security enabled testing"); @@ -91,16 +92,14 @@ protected String getProtocol() { return isHttps() ? "https" : "http"; } - /** - * Get the client to remote cluster used for ordinary api calls while writing a test. - */ + /** Get the client to remote cluster used for ordinary api calls while writing a test. */ protected static RestClient remoteClient() { return remoteClient; } /** - * Get the client to remote cluster used for test administrative actions. - * Do not use this while writing a test. Only use it for cleaning up after tests. + * Get the client to remote cluster used for test administrative actions. Do not use this while + * writing a test. Only use it for cleaning up after tests. */ protected static RestClient remoteAdminClient() { return remoteAdminClient; @@ -139,9 +138,7 @@ public RestClient initClient(String clusterName) throws IOException { return buildClient(restClientSettings(), hosts.toArray(new HttpHost[0])); } - /** - * Get a comma delimited list of [host:port] to which to send REST requests. - */ + /** Get a comma delimited list of [host:port] to which to send REST requests. */ protected String getTestRestCluster(String clusterName) { String cluster = System.getProperty("tests.rest." + clusterName + ".http_hosts"); if (cluster == null) { @@ -149,15 +146,12 @@ protected String getTestRestCluster(String clusterName) { "Must specify [tests.rest." + clusterName + ".http_hosts] system property with a comma delimited list of [host:port] " - + "to which to send REST requests" - ); + + "to which to send REST requests"); } return cluster; } - /** - * Get a comma delimited list of [host:port] for connections between clusters. - */ + /** Get a comma delimited list of [host:port] for connections between clusters. */ protected String getTestTransportCluster(String clusterName) { String cluster = System.getProperty("tests.rest." + clusterName + ".transport_hosts"); if (cluster == null) { @@ -165,8 +159,7 @@ protected String getTestTransportCluster(String clusterName) { "Must specify [tests.rest." 
+ clusterName + ".transport_hosts] system property with a comma delimited list of [host:port] " - + "for connections between clusters" - ); + + "for connections between clusters"); } return cluster; } @@ -192,18 +185,22 @@ protected static void wipeAllOpenSearchIndices(RestClient client) throws IOExcep // include all the indices, included hidden indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-indices.html#cat-indices-api-query-params try { - Response response = client.performRequest(new Request("GET", "/_cat/indices?format=json&expand_wildcards=all")); + Response response = + client.performRequest( + new Request("GET", "/_cat/indices?format=json&expand_wildcards=all")); JSONArray jsonArray = new JSONArray(EntityUtils.toString(response.getEntity(), "UTF-8")); for (Object object : jsonArray) { JSONObject jsonObject = (JSONObject) object; String indexName = jsonObject.getString("index"); try { - // System index, mostly named .opensearch-xxx or .opendistro-xxx, are not allowed to delete + // System index, mostly named .opensearch-xxx or .opendistro-xxx, are not allowed to + // delete if (!indexName.startsWith(".opensearch") && !indexName.startsWith(".opendistro")) { client.performRequest(new Request("DELETE", "/" + indexName)); } } catch (Exception e) { - // TODO: Ignore index delete error for now. Remove this if strict check on system index added above. + // TODO: Ignore index delete error for now. Remove this if strict check on system index + // added above. LOG.warn("Failed to delete index: " + indexName, e); } } @@ -221,20 +218,20 @@ protected static void configureClient(RestClientBuilder builder, Settings settin String userName = System.getProperty("user"); String password = System.getProperty("password"); if (userName != null && password != null) { - builder.setHttpClientConfigCallback(httpClientBuilder -> { - BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials( - new AuthScope(null, -1), - new UsernamePasswordCredentials(userName, password.toCharArray())); - return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); - }); + builder.setHttpClientConfigCallback( + httpClientBuilder -> { + BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials( + new AuthScope(null, -1), + new UsernamePasswordCredentials(userName, password.toCharArray())); + return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + }); } OpenSearchRestTestCase.configureClient(builder, settings); } - protected static void configureHttpsClient(RestClientBuilder builder, Settings settings, - HttpHost httpHost) - throws IOException { + protected static void configureHttpsClient( + RestClientBuilder builder, Settings settings, HttpHost httpHost) throws IOException { Map headers = ThreadContext.buildDefaultHeaders(settings); Header[] defaultHeaders = new Header[headers.size()]; int i = 0; @@ -242,56 +239,63 @@ protected static void configureHttpsClient(RestClientBuilder builder, Settings s defaultHeaders[i++] = new BasicHeader(entry.getKey(), entry.getValue()); } builder.setDefaultHeaders(defaultHeaders); - builder.setHttpClientConfigCallback(httpClientBuilder -> { - String userName = Optional.ofNullable(System.getProperty("user")) - .orElseThrow(() -> new RuntimeException("user name is missing")); - String password = Optional.ofNullable(System.getProperty("password")) - .orElseThrow(() -> new RuntimeException("password is missing")); - 
BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider - .setCredentials(new AuthScope(httpHost), new UsernamePasswordCredentials(userName, - password.toCharArray())); - try { - final TlsStrategy tlsStrategy = ClientTlsStrategyBuilder.create() - .setSslContext(SSLContextBuilder.create() - .loadTrustMaterial(null, (chains, authType) -> true) - .build()) - .setHostnameVerifier(NoopHostnameVerifier.INSTANCE) - .build(); + builder.setHttpClientConfigCallback( + httpClientBuilder -> { + String userName = + Optional.ofNullable(System.getProperty("user")) + .orElseThrow(() -> new RuntimeException("user name is missing")); + String password = + Optional.ofNullable(System.getProperty("password")) + .orElseThrow(() -> new RuntimeException("password is missing")); + BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials( + new AuthScope(httpHost), + new UsernamePasswordCredentials(userName, password.toCharArray())); + try { + final TlsStrategy tlsStrategy = + ClientTlsStrategyBuilder.create() + .setSslContext( + SSLContextBuilder.create() + .loadTrustMaterial(null, (chains, authType) -> true) + .build()) + .setHostnameVerifier(NoopHostnameVerifier.INSTANCE) + .build(); - return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider) - .setConnectionManager(PoolingAsyncClientConnectionManagerBuilder.create() - .setTlsStrategy(tlsStrategy) - .build()); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + return httpClientBuilder + .setDefaultCredentialsProvider(credentialsProvider) + .setConnectionManager( + PoolingAsyncClientConnectionManagerBuilder.create() + .setTlsStrategy(tlsStrategy) + .build()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); final String socketTimeoutString = settings.get(CLIENT_SOCKET_TIMEOUT); final TimeValue socketTimeout = - TimeValue.parseTimeValue(socketTimeoutString == null ? "60s" : socketTimeoutString, - CLIENT_SOCKET_TIMEOUT); + TimeValue.parseTimeValue( + socketTimeoutString == null ? "60s" : socketTimeoutString, CLIENT_SOCKET_TIMEOUT); builder.setRequestConfigCallback( - conf -> conf.setResponseTimeout(Timeout.ofMilliseconds(Math.toIntExact(socketTimeout.getMillis())))); + conf -> + conf.setResponseTimeout( + Timeout.ofMilliseconds(Math.toIntExact(socketTimeout.getMillis())))); if (settings.hasValue(CLIENT_PATH_PREFIX)) { builder.setPathPrefix(settings.get(CLIENT_PATH_PREFIX)); } } /** - * Initialize rest client to remote cluster, - * and create a connection to it from the coordinating cluster. + * Initialize rest client to remote cluster, and create a connection to it from the coordinating + * cluster. 
*/ - public void configureMultiClusters(String remote) - throws IOException { + public void configureMultiClusters(String remote) throws IOException { initRemoteClient(remote); Request connectionRequest = new Request("PUT", "_cluster/settings"); - String connectionSetting = String.format( - REMOTE_CLUSTER_SETTING, - remote, - getTestTransportCluster(remote).split(",")[0]); + String connectionSetting = + String.format( + REMOTE_CLUSTER_SETTING, remote, getTestTransportCluster(remote).split(",")[0]); connectionRequest.setJsonEntity(connectionSetting); adminClient().performRequest(connectionRequest); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java index c8b4b87f69..20bed5d2ed 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -76,17 +75,16 @@ public void orderByIsNull() throws IOException { assertThat(query(hits, "/0/_source/id"), equalTo("5")); // Another equivalent syntax - assertThat(explainQuery("SELECT * FROM opensearch-sql_test_index_order " + - "ORDER BY id IS NULL, id DESC"), - equalTo(explainQuery("SELECT * FROM opensearch-sql_test_index_order " + - "ORDER BY id IS NULL DESC")) - ); + assertThat( + explainQuery("SELECT * FROM opensearch-sql_test_index_order ORDER BY id IS NULL, id DESC"), + equalTo( + explainQuery( + "SELECT * FROM opensearch-sql_test_index_order ORDER BY id IS NULL DESC"))); } @Test public void orderByIsNotNull() throws IOException { - String query = - "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY name IS NOT NULL"; + String query = "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY name IS NOT NULL"; JSONArray result = getSortExplain(query); assertThat(1, equalTo(result.length())); assertThat(query(result, "/0/name.keyword/order"), equalTo("asc")); @@ -95,21 +93,24 @@ public void orderByIsNotNull() throws IOException { JSONObject response = executeQuery(query); JSONArray hits = getHits(response); assertFalse(hits.getJSONObject(0).getJSONObject("_source").has("name")); - assertThat(hits.getJSONObject(hits.length() - 1).query("/_source/name").toString(), - equalTo("f")); + assertThat( + hits.getJSONObject(hits.length() - 1).query("/_source/name").toString(), equalTo("f")); // Another equivalent syntax - assertThat(explainQuery("SELECT id, name FROM opensearch-sql_test_index_order " + - "ORDER BY name IS NOT NULL"), - equalTo(explainQuery("SELECT id, name FROM opensearch-sql_test_index_order " + - "ORDER BY name IS NOT NULL ASC")) - ); + assertThat( + explainQuery( + "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY name IS NOT NULL"), + equalTo( + explainQuery( + "SELECT id, name FROM opensearch-sql_test_index_order " + + "ORDER BY name IS NOT NULL ASC"))); } @Test public void multipleOrderByWithNulls() throws IOException { String query = - "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY id IS NULL, name IS NOT NULL"; + "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY id IS NULL, name IS NOT" + + " NULL"; JSONArray result = getSortExplain(query); assertThat(result.length(), equalTo(2)); assertThat(query(result, "/0/id/missing"), equalTo("_last")); @@ -118,8 +119,9 @@ public void multipleOrderByWithNulls() throws IOException { @Test public void 
testOrderByMergeForSameField() throws IOException { - String query = "SELECT * FROM opensearch-sql_test_index_order " + - "ORDER BY id IS NULL, name DESC, id DESC, id IS NOT NULL, name IS NULL"; + String query = + "SELECT * FROM opensearch-sql_test_index_order " + + "ORDER BY id IS NULL, name DESC, id DESC, id IS NOT NULL, name IS NULL"; JSONArray result = getSortExplain(query); assertThat(2, equalTo(result.length())); assertThat(query(result, "/0/id/order"), equalTo("asc")); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java index ecec5844be..caea2aa7c6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -23,125 +22,191 @@ protected void init() throws Exception { // tests query results with jdbc output @Test public void simpleGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b GROUP BY lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b GROUP BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b GROUP BY lastname LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b GROUP BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void multipleGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b GROUP BY firstname, age, lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b GROUP BY 2, 3, 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b GROUP BY firstname, age, lastname" + + " LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b GROUP BY 2, 3, 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b GROUP BY `lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b GROUP BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b GROUP BY `lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b GROUP BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksAndTableAliasGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT 
`b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY `age`, `b`.`lastname` , firstname LIMIT 10", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY 2, 1, 3 LIMIT 10", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY `age`," + + " `b`.`lastname` , firstname LIMIT 10", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY 2, 1, 3 LIMIT 10", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void simpleOrderByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b ORDER BY lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b ORDER BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b ORDER BY lastname LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void multipleOrderByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b ORDER BY firstname, age, lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b ORDER BY 2, 3, 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b ORDER BY firstname, age, lastname" + + " LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b ORDER BY 2, 3, 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksOrderByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b ORDER BY `lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b ORDER BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b ORDER BY `lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksAndTableAliasOrderByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + 
"SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } // ORDER BY IS NULL/NOT NULL @Test public void selectFieldiWithBacticksAndTableAliasOrderByOrdinalAndNull() { - String expected = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC, age is NULL LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC," + + " age is NULL LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL" + + " LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } - // explain @Test public void explainSelectFieldiWithBacticksAndTableAliasGroupByOrdinal() throws IOException { - String expected = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b GROUP BY `b`.`lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - String actual = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b GROUP BY 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); + String expected = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b GROUP BY `b`.`lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + String actual = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b GROUP BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(actual, equalTo(expected)); } @Test public void explainSelectFieldiWithBacticksAndTableAliasOrderByOrdinal() throws IOException { - String expected = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - String actual = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); + String expected = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + String actual = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(actual, equalTo(expected)); } @@ -149,12 +214,18 @@ public void explainSelectFieldiWithBacticksAndTableAliasOrderByOrdinal() throws @Ignore("only work for legacy engine") public void explainSelectFieldiWithBacticksAndTableAliasOrderByOrdinalAndNull() throws IOException { - String expected = explainQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC, age is NULL LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - String actual = explainQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); + String expected = + explainQuery( + StringUtils.format( + "SELECT 
`b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC," + + " age is NULL LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + String actual = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL" + + " LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(actual, equalTo(expected)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java index 5f7de5d496..9cbb73cd5b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -33,8 +32,9 @@ protected void init() throws Exception { public void sqlEnableSettingsTest() throws IOException { loadIndex(Index.ACCOUNT); updateClusterSettings(new ClusterSetting(PERSISTENT, "plugins.sql.enabled", "true")); - String query = String - .format(Locale.ROOT, "SELECT firstname FROM %s WHERE account_number=1", TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, "SELECT firstname FROM %s WHERE account_number=1", TEST_INDEX_ACCOUNT); JSONObject queryResult = executeQuery(query); assertThat(getHits(queryResult).length(), equalTo(1)); @@ -50,16 +50,19 @@ public void sqlEnableSettingsTest() throws IOException { assertThat(queryResult.getInt("status"), equalTo(400)); JSONObject error = queryResult.getJSONObject("error"); assertThat(error.getString("reason"), equalTo("Invalid SQL query")); - assertThat(error.getString("details"), equalTo( - "Either plugins.sql.enabled or rest.action.multi.allow_explicit_index setting is false")); + assertThat( + error.getString("details"), + equalTo( + "Either plugins.sql.enabled or rest.action.multi.allow_explicit_index setting is" + + " false")); assertThat(error.getString("type"), equalTo("SQLFeatureDisabledException")); wipeAllClusterSettings(); } @Test public void sqlDeleteSettingsTest() throws IOException { - updateClusterSettings(new ClusterSetting(PERSISTENT, - Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "false")); + updateClusterSettings( + new ClusterSetting(PERSISTENT, Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "false")); String deleteQuery = StringUtils.format("DELETE FROM %s", TestsConstants.TEST_INDEX_ACCOUNT); final ResponseException exception = @@ -70,8 +73,8 @@ public void sqlDeleteSettingsTest() throws IOException { "{\n" + " \"error\": {\n" + " \"reason\": \"Invalid SQL query\",\n" - + " \"details\": \"DELETE clause is disabled by default and will be deprecated. Using " - + "the plugins.sql.delete.enabled setting to enable it\",\n" + + " \"details\": \"DELETE clause is disabled by default and will be deprecated." 
+ + " Using the plugins.sql.delete.enabled setting to enable it\",\n" + " \"type\": \"SQLFeatureDisabledException\"\n" + " },\n" + " \"status\": 400\n" @@ -84,329 +87,355 @@ public void sqlDeleteSettingsTest() throws IOException { @Test public void sqlTransientOnlySettingTest() throws IOException { // (1) compact form - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_interval\": \"80\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_interval\": \"80\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : { }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"80\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : { }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"80\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); // (2) partial expanded form - settings = "{" + - " \"transient\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics.rolling_interval\": \"75\"" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"transient\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics.rolling_interval\": \"75\"" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : { }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"75\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : { }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"75\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); - // (3) full expanded form - settings = "{" + - " \"transient\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\": {" + - " \"rolling_interval\": \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"transient\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\": {" + + " \"rolling_interval\": \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : { }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : { }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void sqlPersistentOnlySettingTest() throws IOException { // (1) compact form - String settings = "{" + - " \"persistent\": {" + - " \"plugins.query.metrics.rolling_interval\": \"80\"" + - " }" + - "}"; + String settings = + "{" + + " \"persistent\": {" + + " 
\"plugins.query.metrics.rolling_interval\": \"80\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"transient\" : { }," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"80\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"transient\" : { }," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"80\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); // (2) partial expanded form - settings = "{" + - " \"persistent\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics.rolling_interval\": \"75\"" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"persistent\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics.rolling_interval\": \"75\"" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"transient\" : { }," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"75\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"transient\" : { }," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"75\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); - // (3) full expanded form - settings = "{" + - " \"persistent\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\": {" + - " \"rolling_interval\": \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"persistent\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\": {" + + " \"rolling_interval\": \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"transient\" : { }," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"transient\" : { }," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } /** - * Both transient and persistent settings are applied for same settings. - * This is similar to _cluster/settings behavior + * Both transient and persistent settings are applied for same settings. 
This is similar to + * _cluster/settings behavior */ @Test public void sqlCombinedSettingTest() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"" + - " }," + - " \"persistent\": {" + - " \"plugins.sql.slowlog\" : \"2\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"" + + " }," + + " \"persistent\": {" + + " \"plugins.sql.slowlog\" : \"2\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"2\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"2\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } - /** - * Ignore all non plugins.sql settings. - * Only settings starting with plugins.sql. are affected - */ + /** Ignore all non plugins.sql settings. Only settings starting with plugins.sql. are affected */ @Test public void ignoreNonSQLSettingsTest() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"," + - " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + - " \"search.max_buckets\": \"10000\"," + - " \"search.max_keep_alive\": \"24h\"" + - " }," + - " \"persistent\": {" + - " \"plugins.sql.slowlog\": \"2\"," + - " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + - " \"thread_pool.analyze.queue_size\": \"16\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"," + + " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + + " \"search.max_buckets\": \"10000\"," + + " \"search.max_keep_alive\": \"24h\"" + + " }," + + " \"persistent\": {" + + " \"plugins.sql.slowlog\": \"2\"," + + " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + + " \"thread_pool.analyze.queue_size\": \"16\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"2\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"2\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void ignoreNonTransientNonPersistentSettingsTest() throws IOException { - String settings = "{" + - " \"transient\": {" + - " 
\"plugins.query.metrics.rolling_window\": \"3700\"" + - " }," + - " \"persistent\": {" + - " \"plugins.sql.slowlog\": \"2\"" + - " }," + - " \"hello\": {" + - " \"world\" : {" + - " \"name\" : \"John Doe\"" + - " }" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"" + + " }," + + " \"persistent\": {" + + " \"plugins.sql.slowlog\": \"2\"" + + " }," + + " \"hello\": {" + + " \"world\" : {" + + " \"name\" : \"John Doe\"" + + " }" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"2\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"2\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void sqlCombinedMixedSettingTest() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"" + - " }," + - " \"persistent\": {" + - " \"plugins\": {" + - " \"sql\": {" + - " \"slowlog\": \"1\"" + - " }" + - " }" + - " }," + - " \"hello\": {" + - " \"world\": {" + - " \"city\": \"Seattle\"" + - " }" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"" + + " }," + + " \"persistent\": {" + + " \"plugins\": {" + + " \"sql\": {" + + " \"slowlog\": \"1\"" + + " }" + + " }" + + " }," + + " \"hello\": {" + + " \"world\": {" + + " \"city\": \"Seattle\"" + + " }" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"1\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"1\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void nonRegisteredSQLSettingsThrowException() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.sql.query.state.city\": \"Seattle\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.sql.query.state.city\": \"Seattle\"" + + " }" + + "}"; JSONObject actual; Response response = null; @@ -421,8 +450,7 @@ public void nonRegisteredSQLSettingsThrowException() throws IOException { assertThat(actual.query("/error/type"), equalTo("settings_exception")); assertThat( actual.query("/error/reason"), - equalTo("transient setting 
[plugins.sql.query.state.city], not recognized") - ); + equalTo("transient setting [plugins.sql.query.state.city], not recognized")); } protected static JSONObject updateViaSQLSettingsAPI(String body) throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java index 88f72d1907..dd177ec1f1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import java.io.IOException; @@ -25,27 +24,34 @@ protected void init() throws Exception { public void testPreparedStatement() throws IOException { int ageToCompare = 35; - JSONObject response = executeRequest(String.format("{\n" + - " \"query\": \"SELECT * FROM %s WHERE age > ? AND state in (?, ?) LIMIT ?\",\n" + - " \"parameters\": [\n" + - " {\n" + - " \"type\": \"integer\",\n" + - " \"value\": \"" + ageToCompare + "\"\n" + - " },\n" + - " {\n" + - " \"type\": \"string\",\n" + - " \"value\": \"TN\"\n" + - " },\n" + - " {\n" + - " \"type\": \"string\",\n" + - " \"value\": \"UT\"\n" + - " },\n" + - " {\n" + - " \"type\": \"integer\",\n" + - " \"value\": \"20\"\n" + - " }\n" + - " ]\n" + - "}", TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeRequest( + String.format( + "{\n" + + " \"query\": \"SELECT * FROM %s WHERE age > ? AND state in (?, ?) LIMIT" + + " ?\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"integer\",\n" + + " \"value\": \"" + + ageToCompare + + "\"\n" + + " },\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"TN\"\n" + + " },\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"UT\"\n" + + " },\n" + + " {\n" + + " \"type\": \"integer\",\n" + + " \"value\": \"20\"\n" + + " }\n" + + " ]\n" + + "}", + TestsConstants.TEST_INDEX_ACCOUNT)); Assert.assertTrue(response.has("hits")); Assert.assertTrue(response.getJSONObject("hits").has("hits")); @@ -58,23 +64,23 @@ public void testPreparedStatement() throws IOException { } } - /* currently the integ test case will fail if run using Intellj, have to run using gradle command - * because the integ test cluster created by IntellJ has http diabled, need to spend some time later to - * figure out how to configure the integ test cluster properly. Related online resources: - * https://discuss.elastic.co/t/http-enabled-with-OpenSearchIntegTestCase/102032 - * https://discuss.elastic.co/t/help-with-OpenSearchIntegTestCase/105245 - @Override - protected Collection> nodePlugins() { - return Arrays.asList(MockTcpTransportPlugin.class); - } + /* currently the integ test case will fail if run using Intellj, have to run using gradle command + * because the integ test cluster created by IntellJ has http diabled, need to spend some time later to + * figure out how to configure the integ test cluster properly. 
Related online resources: + * https://discuss.elastic.co/t/http-enabled-with-OpenSearchIntegTestCase/102032 + * https://discuss.elastic.co/t/help-with-OpenSearchIntegTestCase/105245 + @Override + protected Collection> nodePlugins() { + return Arrays.asList(MockTcpTransportPlugin.class); + } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - // .put("node.mode", "network") - .put("http.enabled", true) - //.put("http.type", "netty4") - .build(); - } - */ + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + // .put("node.mode", "network") + .put("http.enabled", true) + //.put("http.type", "netty4") + .build(); + } + */ } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java index ef80098df6..07883d92f4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static java.util.stream.Collectors.toSet; @@ -30,6 +29,9 @@ import org.opensearch.client.Request; /** + * + * + *
   * PrettyFormatResponseIT will likely exclude some of the tests written in PrettyFormatResponseTest, since
   * those tests asserted on class objects directly. These updated tests only make assertions based
   * on the REST response.
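As context for the javadoc above: these integration tests assert on the plugin's JDBC-format REST response rather than on internal result objects. The sketch below is a rough illustration only and is not part of this patch; it assumes the response exposes a "schema" array of column descriptors with a "name" key and a "datarows" array of value rows, which is what the getSchema/getDataRows helpers used throughout this file appear to return, and the class and method names here are hypothetical.

// Hypothetical helper, shown only to illustrate the REST-response assertion style;
// the response shape ("schema" / "datarows") is assumed from the helper calls in this file.
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.json.JSONArray;
import org.json.JSONObject;

final class JdbcResponseAssertions {

  /** Collects the column names advertised in the "schema" section of a JDBC-format response. */
  static Set<String> columnNames(JSONObject response) {
    JSONArray schema = response.getJSONArray("schema");
    Set<String> names = new HashSet<>();
    for (int i = 0; i < schema.length(); i++) {
      names.add(schema.getJSONObject(i).getString("name"));
    }
    return names;
  }

  /** Fails if any expected column is missing from the schema. */
  static void assertContainsColumns(JSONObject response, List<String> expected) {
    Set<String> actual = columnNames(response);
    for (String field : expected) {
      if (!actual.contains(field)) {
        throw new AssertionError("Missing column in schema: " + field);
      }
    }
  }

  /** Fails if any row in "datarows" does not carry one value per expected column. */
  static void assertRowWidth(JSONObject response, int expectedWidth) {
    JSONArray rows = response.getJSONArray("datarows");
    for (int i = 0; i < rows.length(); i++) {
      if (rows.getJSONArray(i).length() != expectedWidth) {
        throw new AssertionError("Unexpected row width at index " + i);
      }
    }
  }
}

Asserting on the serialized response keeps these tests independent of internal result classes, which is the point the javadoc above is making.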
@@ -40,21 +42,32 @@
  * Tests from original integ tests excluded:
  * - noIndexType()
  * - withIndexType()
+ * 
*/ public class PrettyFormatResponseIT extends SQLIntegTestCase { - private static final Set allAccountFields = Sets.newHashSet( - "account_number", "balance", "firstname", "lastname", "age", "gender", "address", "employer", - "email", "city", "state" - ); + private static final Set allAccountFields = + Sets.newHashSet( + "account_number", + "balance", + "firstname", + "lastname", + "age", + "gender", + "address", + "employer", + "email", + "city", + "state"); private static final Set regularFields = Sets.newHashSet("someField", "myNum"); - private static final Set messageFields = Sets.newHashSet( - "message.dayOfWeek", "message.info", "message.author"); + private static final Set messageFields = + Sets.newHashSet("message.dayOfWeek", "message.info", "message.author"); - private static final Set messageFieldsWithNestedFunction = Sets.newHashSet( - "nested(message.dayOfWeek)", "nested(message.info)", "nested(message.author)"); + private static final Set messageFieldsWithNestedFunction = + Sets.newHashSet( + "nested(message.dayOfWeek)", "nested(message.info)", "nested(message.author)"); private static final Set commentFields = Sets.newHashSet("comment.data", "comment.likes"); @@ -83,19 +96,20 @@ protected Request getSqlRequest(String request, boolean explain) { public void wrongIndexType() throws IOException { String type = "wrongType"; try { - executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s/%s", - TestsConstants.TEST_INDEX_ACCOUNT, type)); + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s/%s", TestsConstants.TEST_INDEX_ACCOUNT, type)); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), - is(String.format(Locale.ROOT, "Index type %s does not exist", type))); + assertThat( + e.getMessage(), is(String.format(Locale.ROOT, "Index type %s does not exist", type))); } } @Test public void selectAll() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format(Locale.ROOT, "SELECT * FROM %s", TestsConstants.TEST_INDEX_ACCOUNT)); // This also tests that .keyword fields are ignored when SELECT * is called assertContainsColumnsInAnyOrder(getSchema(response), allAccountFields); @@ -104,9 +118,12 @@ public void selectAll() throws IOException { @Test public void selectNames() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT firstname, lastname FROM %s", + TestsConstants.TEST_INDEX_ACCOUNT)); assertContainsColumns(getSchema(response), nameFields); assertContainsData(getDataRows(response), nameFields); @@ -115,13 +132,15 @@ public void selectNames() throws IOException { @Ignore("Semantic analysis takes care of this") @Test public void selectWrongField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT wrongField FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT wrongField FROM %s", TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(getSchema(response).length(), equalTo(0)); - // DataRows object will still get populated with SearchHits but since wrongField is not available in the Map + // DataRows object will still get populated with SearchHits but since wrongField is not + // available in the Map // each row in 
the response will be empty // TODO Perhaps a code change should be made to format logic to ensure a // 'datarows' length of 0 in response for this case @@ -131,9 +150,12 @@ public void selectWrongField() throws IOException { @Test @Ignore("_score tested in V2 engine - @see org.opensearch.sql.sql.ScoreQueryIT") public void selectScore() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT _score FROM %s WHERE SCORE(match_phrase(phrase, 'brown fox'))", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT _score FROM %s WHERE SCORE(match_phrase(phrase, 'brown fox'))", + TestsConstants.TEST_INDEX_PHRASE)); List fields = Collections.singletonList("_score"); assertContainsColumns(getSchema(response), fields); @@ -142,14 +164,14 @@ public void selectScore() throws IOException { @Test public void selectAllFromNestedWithoutFieldInFrom() throws IOException { - assertNestedFieldQueryResultContainsColumnsAndData("SELECT * FROM %s", - regularFields, fields("message", "comment")); + assertNestedFieldQueryResultContainsColumnsAndData( + "SELECT * FROM %s", regularFields, fields("message", "comment")); } @Test public void selectAllFromNestedWithFieldInFrom() throws IOException { - assertNestedFieldQueryResultContainsColumnsAndData("SELECT * FROM %s e, e.message m", - regularFields, messageFields); + assertNestedFieldQueryResultContainsColumnsAndData( + "SELECT * FROM %s e, e.message m", regularFields, messageFields); } @Test @@ -161,29 +183,27 @@ public void selectAllFromNestedWithMultipleFieldsInFrom() throws IOException { @Test public void selectAllNestedFromNestedWithFieldInFrom() throws IOException { - assertNestedFieldQueryResultContainsColumnsAndData("SELECT m.* FROM %s e, e.message m", - messageFields); + assertNestedFieldQueryResultContainsColumnsAndData( + "SELECT m.* FROM %s e, e.message m", messageFields); } @Test public void selectSpecificRegularFieldAndAllFromNestedWithFieldInFrom() throws IOException { assertNestedFieldQueryResultContainsColumnsAndData( - "SELECT e.someField, m.* FROM %s e, e.message m", - fields("someField"), messageFields); + "SELECT e.someField, m.* FROM %s e, e.message m", fields("someField"), messageFields); } /** - * Execute the query against index with nested fields and assert result contains columns and data as expected. + * Execute the query against index with nested fields and assert result contains columns and data + * as expected. */ @SafeVarargs - private final void assertNestedFieldQueryResultContainsColumnsAndData(String query, - Set... expectedFieldNames) - throws IOException { + private final void assertNestedFieldQueryResultContainsColumnsAndData( + String query, Set... expectedFieldNames) throws IOException { JSONObject response = executeQuery(String.format(Locale.ROOT, query, TestsConstants.TEST_INDEX_NESTED_TYPE)); - Set allExpectedFieldNames = Stream.of(expectedFieldNames). - flatMap(Set::stream). - collect(toSet()); + Set allExpectedFieldNames = + Stream.of(expectedFieldNames).flatMap(Set::stream).collect(toSet()); assertContainsColumnsInAnyOrder(getSchema(response), allExpectedFieldNames); assertContainsData(getDataRows(response), allExpectedFieldNames); @@ -195,24 +215,31 @@ private Set fields(String... 
fieldNames) { @Test public void selectNestedFields() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT nested(message.info), someField FROM %s", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT nested(message.info), someField FROM %s", + TestsConstants.TEST_INDEX_NESTED_TYPE)); List fields = Arrays.asList("nested(message.info)", "someField"); assertContainsColumns(getSchema(response), fields); assertContainsData(getDataRows(response), fields); - // The nested test index being used contains 5 entries but one of them has an array of 2 message objects, so + // The nested test index being used contains 5 entries but one of them has an array of 2 message + // objects, so // we check to see if the amount of data rows is 6 since that is the result after flattening assertThat(getDataRows(response).length(), equalTo(6)); } @Test public void selectNestedFieldWithWildcard() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT nested(message.*) FROM %s", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT nested(message.*) FROM %s", + TestsConstants.TEST_INDEX_NESTED_TYPE)); assertContainsColumnsInAnyOrder(getSchema(response), messageFieldsWithNestedFunction); assertContainsData(getDataRows(response), messageFields); @@ -221,11 +248,13 @@ public void selectNestedFieldWithWildcard() throws IOException { @Test public void selectWithWhere() throws IOException { int balanceToCompare = 30000; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT balance " + - "FROM %s " + - "WHERE balance > %d", - TestsConstants.TEST_INDEX_ACCOUNT, balanceToCompare)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT balance " + "FROM %s " + "WHERE balance > %d", + TestsConstants.TEST_INDEX_ACCOUNT, + balanceToCompare)); /* * Previously the DataRows map was used to check specific fields but the JDBC response for "datarows" is a @@ -243,9 +272,10 @@ public void selectWithWhere() throws IOException { @Test public void groupBySingleField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s GROUP BY age", TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Collections.singletonList("age"); assertContainsColumns(getSchema(response), fields); @@ -254,9 +284,12 @@ public void groupBySingleField() throws IOException { @Test public void groupByMultipleFields() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s GROUP BY age, balance", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s GROUP BY age, balance", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("age", "balance"); assertContainsColumns(getSchema(response), fields); @@ -265,35 +298,42 @@ public void groupByMultipleFields() throws IOException { @Ignore("only work for legacy engine") public void testSizeAndTotal() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE balance > 30000 " + - "LIMIT 5", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + 
executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE balance > 30000 LIMIT 5", + TestsConstants.TEST_INDEX_ACCOUNT)); JSONArray dataRows = getDataRows(response); assertThat(dataRows.length(), equalTo(5)); - // The value to compare to here was obtained by running the query in the plugin and looking at the SearchHits + // The value to compare to here was obtained by running the query in the plugin and looking at + // the SearchHits int totalHits = response.getInt("total"); assertThat(totalHits, equalTo(402)); } @Test public void testSizeWithGroupBy() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s GROUP BY age LIMIT 5", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s GROUP BY age LIMIT 5", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(getDataRows(response).length(), equalTo(5)); } @Test public void aggregationFunctionInSelect() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT COUNT(*) FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT COUNT(*) FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("COUNT(*)"); assertContainsColumns(getSchema(response), fields); @@ -310,9 +350,12 @@ public void aggregationFunctionInSelect() throws IOException { @Ignore("In MySQL and our new engine, the original text in SELECT is used as final column name") @Test public void aggregationFunctionInSelectCaseCheck() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT count(*) FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT count(*) FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("COUNT(*)"); assertContainsColumns(getSchema(response), fields); @@ -328,9 +371,12 @@ public void aggregationFunctionInSelectCaseCheck() throws IOException { @Ignore("only work for legacy engine") public void aggregationFunctionInSelectWithAlias() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT COUNT(*) AS total FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT COUNT(*) AS total FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("total"); assertContainsColumns(getSchema(response), fields); @@ -346,8 +392,10 @@ public void aggregationFunctionInSelectWithAlias() throws IOException { @Test public void aggregationFunctionInSelectNoGroupBy() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT SUM(age) FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT SUM(age) FROM %s", TestsConstants.TEST_INDEX_ACCOUNT)); String ageSum = "SUM(age)"; assertContainsColumns(getSchema(response), Collections.singletonList(ageSum)); @@ -363,9 +411,12 @@ public void aggregationFunctionInSelectNoGroupBy() throws IOException { @Test public void multipleAggregationFunctionsInSelect() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT COUNT(*), AVG(age) FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + 
JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT COUNT(*), AVG(age) FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("COUNT(*)", "AVG(age)"); assertContainsColumns(getSchema(response), fields); @@ -374,12 +425,12 @@ public void multipleAggregationFunctionsInSelect() throws IOException { @Test public void aggregationFunctionInHaving() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT gender " + - "FROM %s " + - "GROUP BY gender " + - "HAVING count(*) > 500", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT gender FROM %s GROUP BY gender HAVING count(*) > 500", + TestsConstants.TEST_INDEX_ACCOUNT)); String ageSum = "gender"; assertContainsColumns(getSchema(response), Collections.singletonList(ageSum)); @@ -390,20 +441,24 @@ public void aggregationFunctionInHaving() throws IOException { } /** - * This case doesn't seem to be supported by the plugin at the moment. - * Looks like the painless script of the inner function is put inside the aggregation function but - * this syntax may not be correct since it returns 0 which is the default value (since 0 is returned in - * cases like COUNT(wrongField) as well). + * This case doesn't seem to be supported by the plugin at the moment. Looks like the painless + * script of the inner function is put inside the aggregation function but this syntax may not be + * correct since it returns 0 which is the default value (since 0 is returned in cases like + * COUNT(wrongField) as well). */ -// @Test -// public void nestedAggregationFunctionInSelect() { -// String query = String.format(Locale.ROOT, "SELECT SUM(SQRT(age)) FROM age GROUP BY age", TEST_INDEX_ACCOUNT); -// } + // @Test + // public void nestedAggregationFunctionInSelect() { + // String query = String.format(Locale.ROOT, "SELECT SUM(SQRT(age)) FROM age GROUP BY age", + // TEST_INDEX_ACCOUNT); + // } @Test public void fieldsWithAlias() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT firstname AS first, age AS a FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT firstname AS first, age AS a FROM %s", + TestsConstants.TEST_INDEX_ACCOUNT)); Map aliases = new HashMap<>(); aliases.put("firstname", "first"); @@ -414,25 +469,32 @@ public void fieldsWithAlias() throws IOException { @Test public void indexWithMissingFields() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT phrase, insert_time2 " + - "FROM %s " + - "WHERE match_phrase(phrase, 'brown fox')", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT phrase, insert_time2 " + + "FROM %s " + + "WHERE match_phrase(phrase, 'brown fox')", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray dataRowEntry = getDataRows(response).getJSONArray(0); assertThat(dataRowEntry.length(), equalTo(2)); assertThat(dataRowEntry.get(0), equalTo("brown fox")); - assertThat(dataRowEntry.get(1), - equalTo(JSONObject.NULL)); // TODO See if this null check is failing + assertThat( + dataRowEntry.get(1), equalTo(JSONObject.NULL)); // TODO See if this null check is failing } @Test public void joinQuery() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT b1.balance, b1.age, b2.firstname " + - "FROM %s b1 JOIN %s b2 
ON b1.age = b2.age", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT b1.balance, b1.age, b2.firstname " + + "FROM %s b1 JOIN %s b2 ON b1.age = b2.age", + TestsConstants.TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("b1.balance", "b1.age", "b2.firstname"); assertContainsColumns(getSchema(response), fields); @@ -441,9 +503,14 @@ public void joinQuery() throws IOException { @Test public void joinQueryWithAlias() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT b1.balance AS bal, " + - " b1.age AS age, b2.firstname AS name FROM %s b1 JOIN %s b2 ON b1.age = b2.age", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT b1.balance AS bal, b1.age AS age, b2.firstname AS name FROM %s b1 JOIN %s" + + " b2 ON b1.age = b2.age", + TestsConstants.TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_ACCOUNT)); Map aliases = new HashMap<>(); aliases.put("b1.balance", "bal"); @@ -456,16 +523,20 @@ public void joinQueryWithAlias() throws IOException { @Test public void joinQueryWithObjectFieldInSelect() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT c.name.firstname, d.name.lastname " + - "FROM %s c JOIN %s d ON d.hname = c.house", - TestsConstants.TEST_INDEX_GAME_OF_THRONES, - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT c.name.firstname, d.name.lastname " + + "FROM %s c JOIN %s d ON d.hname = c.house", + TestsConstants.TEST_INDEX_GAME_OF_THRONES, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); List fields = Arrays.asList("c.name.firstname", "d.name.lastname"); assertContainsColumns(getSchema(response), fields); - // d.name.lastname is null here since entries with hname don't have a name.lastname entry, so only length is + // d.name.lastname is null here since entries with hname don't have a name.lastname entry, so + // only length is // checked JSONArray dataRows = getDataRows(response); assertThat(dataRows.length(), greaterThan(0)); @@ -476,10 +547,13 @@ public void joinQueryWithObjectFieldInSelect() throws IOException { @Test public void joinQuerySelectOnlyOnOneTable() throws Exception { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT b1.age " + - "FROM %s b1 JOIN %s b2 ON b1.firstname = b2.firstname", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT b1.age FROM %s b1 JOIN %s b2 ON b1.firstname = b2.firstname", + TestsConstants.TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Collections.singletonList("b1.age"); assertContainsColumns(getSchema(response), fields); @@ -508,8 +582,12 @@ private void testFieldOrder(final String[] expectedFields, final Object[] expect throws IOException { final String fields = String.join(", ", expectedFields); - final String query = String.format(Locale.ROOT, "SELECT %s FROM %s " + - "WHERE email='amberduke@pyrami.com'", fields, TestsConstants.TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT %s FROM %s WHERE email='amberduke@pyrami.com'", + fields, + TestsConstants.TEST_INDEX_ACCOUNT); final JSONObject result = executeQuery(query); for (int i = 0; i < 
expectedFields.length; ++i) { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java index 463a0bc6db..c81839a6e5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -29,16 +28,18 @@ protected void init() throws Exception { public void assertExplainPrettyFormatted() throws IOException { String query = StringUtils.format("SELECT firstname FROM %s", TEST_INDEX_ACCOUNT); - String notPrettyExplainOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/explainIT_format_not_pretty.json"); + String notPrettyExplainOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/explainIT_format_not_pretty.json"); String notPrettyExplainOutput = Files.toString(new File(notPrettyExplainOutputFilePath), StandardCharsets.UTF_8); assertThat(executeExplainRequest(query, ""), equalTo(notPrettyExplainOutput)); assertThat(executeExplainRequest(query, "pretty=false"), equalTo(notPrettyExplainOutput)); - String prettyExplainOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/explainIT_format_pretty.json"); + String prettyExplainOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/explainIT_format_pretty.json"); String prettyExplainOutput = Files.toString(new File(prettyExplainOutputFilePath), StandardCharsets.UTF_8); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java index 3a58b7ffc0..62a87d3bff 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -24,9 +23,7 @@ import org.opensearch.sql.legacy.exception.SqlFeatureNotImplementedException; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Integration test for syntax and semantic analysis against query by new ANTLR parser. - */ +/** Integration test for syntax and semantic analysis against query by new ANTLR parser. */ public class QueryAnalysisIT extends SQLIntegTestCase { @Override @@ -41,9 +38,7 @@ public void missingFromClauseShouldThrowSyntaxException() { @Test public void unsupportedOperatorShouldThrowSyntaxException() { - queryShouldThrowSyntaxException( - "SELECT * FROM opensearch-sql_test_index_bank WHERE age <=> 1" - ); + queryShouldThrowSyntaxException("SELECT * FROM opensearch-sql_test_index_bank WHERE age <=> 1"); } @Test @@ -51,8 +46,8 @@ public void nonExistingFieldNameShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank WHERE balance1 = 1000", "Field [balance1] cannot be found or used here." - //"Did you mean [balance]?" - ); + // "Did you mean [balance]?" + ); } @Test @@ -60,16 +55,15 @@ public void nonExistingIndexAliasShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE a.balance = 1000", "Field [a.balance] cannot be found or used here." - //"Did you mean [b.balance]?" 
- ); + // "Did you mean [b.balance]?" + ); } @Test public void indexJoinNonNestedFieldShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b1, b1.firstname f1", - "Operator [JOIN] cannot work with [INDEX, KEYWORD]." - ); + "Operator [JOIN] cannot work with [INDEX, KEYWORD]."); } @Test @@ -77,8 +71,7 @@ public void scalarFunctionCallWithTypoInNameShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank WHERE ABSa(age) = 1", "Function [ABSA] cannot be found or used here.", - "Did you mean [ABS]?" - ); + "Did you mean [ABS]?"); } @Test @@ -86,17 +79,16 @@ public void scalarFunctionCallWithWrongTypeArgumentShouldThrowSemanticException( queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank WHERE LOG(lastname) = 1", "Function [LOG] cannot work with [KEYWORD].", - "Usage: LOG(NUMBER T) -> DOUBLE or LOG(NUMBER T, NUMBER) -> DOUBLE" - ); + "Usage: LOG(NUMBER T) -> DOUBLE or LOG(NUMBER T, NUMBER) -> DOUBLE"); } @Test public void aggregateFunctionCallWithWrongNumberOfArgumentShouldThrowSemanticException() { queryShouldThrowSemanticException( - "SELECT city FROM opensearch-sql_test_index_bank GROUP BY city HAVING MAX(age, birthdate) > 1", + "SELECT city FROM opensearch-sql_test_index_bank GROUP BY city HAVING MAX(age, birthdate) >" + + " 1", "Function [MAX] cannot work with [INTEGER, DATE].", - "Usage: MAX(NUMBER T) -> T" - ); + "Usage: MAX(NUMBER T) -> T"); } @Test @@ -104,8 +96,7 @@ public void compareIntegerFieldWithBooleanShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE b.age IS FALSE", "Operator [IS] cannot work with [INTEGER, BOOLEAN].", - "Usage: Please use compatible types from each side." - ); + "Usage: Please use compatible types from each side."); } @Test @@ -113,8 +104,7 @@ public void compareNumberFieldWithStringShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE b.age >= 'test'", "Operator [>=] cannot work with [INTEGER, STRING].", - "Usage: Please use compatible types from each side." - ); + "Usage: Please use compatible types from each side."); } @Test @@ -122,43 +112,38 @@ public void compareLogFunctionCallWithNumberFieldWithStringShouldThrowSemanticEx queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE LOG(b.balance) != 'test'", "Operator [!=] cannot work with [DOUBLE, STRING].", - "Usage: Please use compatible types from each side." - ); + "Usage: Please use compatible types from each side."); } @Test public void unionNumberFieldWithStringShouldThrowSemanticException() { queryShouldThrowSemanticException( - "SELECT age FROM opensearch-sql_test_index_bank" + - " UNION SELECT address FROM opensearch-sql_test_index_bank", - "Operator [UNION] cannot work with [INTEGER, TEXT]." - ); + "SELECT age FROM opensearch-sql_test_index_bank" + + " UNION SELECT address FROM opensearch-sql_test_index_bank", + "Operator [UNION] cannot work with [INTEGER, TEXT]."); } @Test public void minusBooleanFieldWithDateShouldThrowSemanticException() { queryShouldThrowSemanticException( - "SELECT male FROM opensearch-sql_test_index_bank" + - " MINUS SELECT birthdate FROM opensearch-sql_test_index_bank", - "Operator [MINUS] cannot work with [BOOLEAN, DATE]." 
- ); + "SELECT male FROM opensearch-sql_test_index_bank" + + " MINUS SELECT birthdate FROM opensearch-sql_test_index_bank", + "Operator [MINUS] cannot work with [BOOLEAN, DATE]."); } @Test public void useInClauseWithIncompatibleFieldTypesShouldFail() { queryShouldThrowSemanticException( - "SELECT * FROM opensearch-sql_test_index_bank WHERE male " + - " IN (SELECT 1 FROM opensearch-sql_test_index_bank)", - "Operator [IN] cannot work with [BOOLEAN, INTEGER]." - ); + "SELECT * FROM opensearch-sql_test_index_bank WHERE male " + + " IN (SELECT 1 FROM opensearch-sql_test_index_bank)", + "Operator [IN] cannot work with [BOOLEAN, INTEGER]."); } @Test public void queryWithNestedFunctionShouldFail() { queryShouldThrowFeatureNotImplementedException( "SELECT abs(log(balance)) FROM opensearch-sql_test_index_bank", - "Nested function calls like [abs(log(balance))] are not supported yet" - ); + "Nested function calls like [abs(log(balance))] are not supported yet"); } @Test @@ -170,29 +155,24 @@ public void nestedFunctionWithMathConstantAsInnerFunctionShouldPass() { public void aggregateWithFunctionAggregatorShouldFail() { queryShouldThrowFeatureNotImplementedException( "SELECT max(log(age)) FROM opensearch-sql_test_index_bank", - "Aggregation calls with function aggregator like [max(log(age))] are not supported yet" - ); + "Aggregation calls with function aggregator like [max(log(age))] are not supported yet"); } @Test public void queryWithUnsupportedFunctionShouldFail() { queryShouldThrowFeatureNotImplementedException( "SELECT balance DIV age FROM opensearch-sql_test_index_bank", - "Operator [DIV] is not supported yet" - ); + "Operator [DIV] is not supported yet"); } @Test public void useNegativeNumberConstantShouldPass() { queryShouldPassAnalysis( - "SELECT * FROM opensearch-sql_test_index_bank " + - "WHERE age > -1 AND balance < -123.456789" - ); + "SELECT * FROM opensearch-sql_test_index_bank " + + "WHERE age > -1 AND balance < -123.456789"); } - /** - * Run the query with cluster setting changed and cleaned after complete - */ + /** Run the query with cluster setting changed and cleaned after complete */ private void runWithClusterSetting(ClusterSetting setting, Runnable query) { try { updateClusterSettings(setting); @@ -201,7 +181,8 @@ private void runWithClusterSetting(ClusterSetting setting, Runnable query) { throw new IllegalStateException( StringUtils.format("Exception raised when running with cluster setting [%s]", setting)); } finally { - // Clean up or OpenSearch will throw java.lang.AssertionError: test leaves persistent cluster metadata behind + // Clean up or OpenSearch will throw java.lang.AssertionError: test leaves persistent cluster + // metadata behind try { updateClusterSettings(setting.nullify()); } catch (IOException e) { @@ -218,20 +199,19 @@ private void queryShouldThrowSemanticException(String query, String... expectedM queryShouldThrowException(query, SemanticAnalysisException.class, expectedMsgs); } - private void queryShouldThrowFeatureNotImplementedException(String query, - String... expectedMsgs) { - queryShouldThrowExceptionWithRestStatus(query, SqlFeatureNotImplementedException.class, - SERVICE_UNAVAILABLE, expectedMsgs); + private void queryShouldThrowFeatureNotImplementedException( + String query, String... expectedMsgs) { + queryShouldThrowExceptionWithRestStatus( + query, SqlFeatureNotImplementedException.class, SERVICE_UNAVAILABLE, expectedMsgs); } - private void queryShouldThrowException(String query, Class exceptionType, - String... 
expectedMsgs) { + private void queryShouldThrowException( + String query, Class exceptionType, String... expectedMsgs) { queryShouldThrowExceptionWithRestStatus(query, exceptionType, BAD_REQUEST, expectedMsgs); } - private void queryShouldThrowExceptionWithRestStatus(String query, Class exceptionType, - RestStatus status, - String... expectedMsgs) { + private void queryShouldThrowExceptionWithRestStatus( + String query, Class exceptionType, RestStatus status, String... expectedMsgs) { try { executeQuery(query); Assert.fail("Expected ResponseException, but none was thrown for query: " + query); @@ -244,8 +224,8 @@ private void queryShouldThrowExceptionWithRestStatus(String query, Class } } catch (IOException e) { throw new IllegalStateException( - "Unexpected IOException raised rather than expected AnalysisException for query: " + - query); + "Unexpected IOException raised rather than expected AnalysisException for query: " + + query); } } @@ -285,5 +265,4 @@ void assertBodyContains(String content) { assertThat(body, containsString(content)); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java index c538db830f..3cf45f7419 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.anyOf; @@ -44,10 +43,13 @@ public class QueryFunctionsIT extends SQLIntegTestCase { private static final String FROM_PHRASE = "FROM " + TEST_INDEX_PHRASE; /** + * + * + *
    * TODO Looks like Math/Date Functions test all use the same query() and execute() functions
    * TODO execute/featureValueOf/hits functions are the same as used in NestedFieldQueryIT, should refactor into util
+   * 
*/ - @Override protected void init() throws Exception { loadIndex(Index.ACCOUNT); @@ -58,63 +60,39 @@ protected void init() throws Exception { @Test public void query() throws IOException { assertThat( - query( - "SELECT state", - FROM_ACCOUNTS, - "WHERE QUERY('CA')" - ), - hits( - hasValueForFields("CA", "state") - ) - ); + query("SELECT state", FROM_ACCOUNTS, "WHERE QUERY('CA')"), + hits(hasValueForFields("CA", "state"))); } @Test public void matchQueryRegularField() throws IOException { assertThat( - query( - "SELECT firstname", - FROM_ACCOUNTS, - "WHERE MATCH_QUERY(firstname, 'Ayers')" - ), - hits( - hasValueForFields("Ayers", "firstname") - ) - ); + query("SELECT firstname", FROM_ACCOUNTS, "WHERE MATCH_QUERY(firstname, 'Ayers')"), + hits(hasValueForFields("Ayers", "firstname"))); } @Test public void matchQueryNestedField() throws IOException { SearchHit[] hits = query("SELECT comment.data", FROM_NESTED, "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')") - .getHits().getHits(); + .getHits() + .getHits(); Map source = hits[0].getSourceAsMap(); // SearchHits innerHits = hits[0].getInnerHits().get("comment"); assertThat( - query( - "SELECT comment.data", - FROM_NESTED, - "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')" - ), + query("SELECT comment.data", FROM_NESTED, "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')"), hits( - anyOf(hasNestedField("comment", "data", "aa"), - hasNestedArrayField("comment", "data", "aa")) - ) - ); + anyOf( + hasNestedField("comment", "data", "aa"), + hasNestedArrayField("comment", "data", "aa")))); } @Test public void scoreQuery() throws IOException { assertThat( query( - "SELECT firstname", - FROM_ACCOUNTS, - "WHERE SCORE(MATCH_QUERY(firstname, 'Ayers'), 10)" - ), - hits( - hasValueForFields("Ayers", "firstname") - ) - ); + "SELECT firstname", FROM_ACCOUNTS, "WHERE SCORE(MATCH_QUERY(firstname, 'Ayers'), 10)"), + hits(hasValueForFields("Ayers", "firstname"))); } @Test @@ -123,42 +101,24 @@ public void scoreQueryWithNestedField() throws IOException { query( "SELECT comment.data", FROM_NESTED, - "WHERE SCORE(MATCH_QUERY(NESTED(comment.data), 'ab'), 10)" - ), + "WHERE SCORE(MATCH_QUERY(NESTED(comment.data), 'ab'), 10)"), hits( - //hasValueForFields("ab", "comment.data") - hasNestedField("comment", - "data", "ab") - ) - ); + // hasValueForFields("ab", "comment.data") + hasNestedField("comment", "data", "ab"))); } @Test public void wildcardQuery() throws IOException { assertThat( - query( - "SELECT city", - FROM_ACCOUNTS, - "WHERE WILDCARD_QUERY(city.keyword, 'B*')" - ), - hits( - hasFieldWithPrefix("city", "B") - ) - ); + query("SELECT city", FROM_ACCOUNTS, "WHERE WILDCARD_QUERY(city.keyword, 'B*')"), + hits(hasFieldWithPrefix("city", "B"))); } @Test public void matchPhraseQuery() throws IOException { assertThat( - query( - "SELECT phrase", - FROM_PHRASE, - "WHERE MATCH_PHRASE(phrase, 'brown fox')" - ), - hits( - hasValueForFields("brown fox", "phrase") - ) - ); + query("SELECT phrase", FROM_PHRASE, "WHERE MATCH_PHRASE(phrase, 'brown fox')"), + hits(hasValueForFields("brown fox", "phrase"))); } @Test @@ -167,12 +127,8 @@ public void multiMatchQuerySingleField() throws IOException { query( "SELECT firstname", FROM_ACCOUNTS, - "WHERE MULTI_MATCH('query'='Ayers', 'fields'='firstname')" - ), - hits( - hasValueForFields("Ayers", "firstname") - ) - ); + "WHERE MULTI_MATCH('query'='Ayers', 'fields'='firstname')"), + hits(hasValueForFields("Ayers", "firstname"))); } @Test @@ -181,36 +137,30 @@ public void multiMatchQueryWildcardField() throws IOException { query( "SELECT 
firstname, lastname", FROM_ACCOUNTS, - "WHERE MULTI_MATCH('query'='Bradshaw', 'fields'='*name')" - ), - hits( - hasValueForFields("Bradshaw", "firstname", "lastname") - ) - ); + "WHERE MULTI_MATCH('query'='Bradshaw', 'fields'='*name')"), + hits(hasValueForFields("Bradshaw", "firstname", "lastname"))); } @Test public void numberLiteralInSelectField() { assertTrue( - executeQuery(StringUtils.format("SELECT 234234 AS number from %s", TEST_INDEX_ACCOUNT), - "jdbc") - .contains("234234") - ); + executeQuery( + StringUtils.format("SELECT 234234 AS number from %s", TEST_INDEX_ACCOUNT), "jdbc") + .contains("234234")); assertTrue( - executeQuery(StringUtils.format("SELECT 2.34234 AS number FROM %s", TEST_INDEX_ACCOUNT), - "jdbc") - .contains("2.34234") - ); + executeQuery( + StringUtils.format("SELECT 2.34234 AS number FROM %s", TEST_INDEX_ACCOUNT), "jdbc") + .contains("2.34234")); } private final Matcher hits(Matcher subMatcher) { - return featureValueOf("hits", everyItem(subMatcher), - resp -> Arrays.asList(resp.getHits().getHits())); + return featureValueOf( + "hits", everyItem(subMatcher), resp -> Arrays.asList(resp.getHits().getHits())); } - private FeatureMatcher featureValueOf(String name, Matcher subMatcher, - Function getter) { + private FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { return new FeatureMatcher(subMatcher, name, name) { @Override protected U featureValueOf(T actual) { @@ -220,6 +170,9 @@ protected U featureValueOf(T actual) { } /** + * + * + *
    * Create Matchers for each field and its value
    * Only one of the Matchers need to match (per hit)
    *
@@ -228,36 +181,34 @@ protected U featureValueOf(T actual) {
    *
    * Then the value "Ayers" can be found in either the firstname or lastname field. Only one of these fields
    * need to satisfy the query value to be evaluated as correct expected output.
+   *
* - * @param value The value to match for a field in the sourceMap + * @param value The value to match for a field in the sourceMap * @param fields A list of fields to match */ @SafeVarargs private final Matcher hasValueForFields(String value, String... fields) { return anyOf( - Arrays.asList(fields). - stream(). - map(field -> kv(field, is(value))). - collect(Collectors.toList())); + Arrays.asList(fields).stream() + .map(field -> kv(field, is(value))) + .collect(Collectors.toList())); } private final Matcher hasFieldWithPrefix(String field, String prefix) { - return featureValueOf(field, startsWith(prefix), - hit -> (String) hit.getSourceAsMap().get(field)); + return featureValueOf( + field, startsWith(prefix), hit -> (String) hit.getSourceAsMap().get(field)); } private final Matcher hasNestedField(String path, String field, String value) { - return featureValueOf(field, is(value), - hit -> ((HashMap) hit.getSourceAsMap().get(path)).get(field)); + return featureValueOf( + field, is(value), hit -> ((HashMap) hit.getSourceAsMap().get(path)).get(field)); } private final Matcher hasNestedArrayField(String path, String field, String value) { return new BaseMatcher() { @Override - public void describeTo(Description description) { - - } + public void describeTo(Description description) {} @Override public boolean matches(Object item) { @@ -275,7 +226,7 @@ private Matcher kv(String key, Matcher valMatcher) { } /*********************************************************** - Query Utility to Fetch Response for SQL + * Query Utility to Fetch Response for SQL ***********************************************************/ private SearchResponse query(String select, String from, String... statements) @@ -286,10 +237,11 @@ private SearchResponse query(String select, String from, String... statements) private SearchResponse execute(String sql) throws IOException { final JSONObject jsonObject = executeQuery(sql); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java index f99285a90b..71795b1fb7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.allOf; @@ -44,6 +43,9 @@ public class QueryIT extends SQLIntegTestCase { /** + * + * + *
    * Currently commenting out tests related to JoinType index since there is an issue with mapping.
    * 

    * Also ignoring the following tests as they are failing, will require investigation:
@@ -57,10 +59,11 @@ public class QueryIT extends SQLIntegTestCase {
    * The following tests are being ignored because subquery is still running in OpenSearch transport thread:
    * - twoSubQueriesTest()
    * - inTermsSubQueryTest()
+   *

*/ + static final int BANK_INDEX_MALE_TRUE = 4; - final static int BANK_INDEX_MALE_TRUE = 4; - final static int BANK_INDEX_MALE_FALSE = 3; + static final int BANK_INDEX_MALE_FALSE = 3; @Override protected void init() throws Exception { @@ -87,92 +90,81 @@ public void queryEndWithSemiColonTest() { @Test public void searchTypeTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s LIMIT 1000", TestsConstants.TEST_INDEX_PHRASE)); Assert.assertTrue(response.has("hits")); Assert.assertEquals(6, getTotalHits(response)); } @Test public void multipleFromTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s, %s LIMIT 2000", - TestsConstants.TEST_INDEX_BANK, TestsConstants.TEST_INDEX_BANK_TWO)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s, %s LIMIT 2000", + TestsConstants.TEST_INDEX_BANK, + TestsConstants.TEST_INDEX_BANK_TWO)); Assert.assertTrue(response.has("hits")); Assert.assertEquals(14, getTotalHits(response)); } @Test public void selectAllWithFieldReturnsAll() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format("SELECT *, age FROM %s LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithFieldReverseOrder() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format("SELECT *, age FROM %s LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithMultipleFields() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age, address " + - "FROM %s " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age, address FROM %s LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithFieldAndOrderBy() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "ORDER BY age " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age FROM %s ORDER BY age LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithFieldAndGroupBy() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "GROUP BY age " + - "LIMIT 10", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age FROM %s GROUP BY age LIMIT 10", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldAggregationResponseSize(response, "age"); } @Test public void selectAllWithFieldAndGroupByReverseOrder() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "GROUP BY age " + - "LIMIT 10", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject 
response = + executeQuery( + StringUtils.format( + "SELECT *, age FROM %s GROUP BY age LIMIT 10", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldAggregationResponseSize(response, "age"); } @@ -180,14 +172,16 @@ public void selectAllWithFieldAndGroupByReverseOrder() throws IOException { @Test public void selectFieldWithAliasAndGroupBy() { String response = - executeQuery("SELECT lastname AS name FROM " + TEST_INDEX_ACCOUNT + " GROUP BY name", - "jdbc"); + executeQuery( + "SELECT lastname AS name FROM " + TEST_INDEX_ACCOUNT + " GROUP BY name", "jdbc"); assertThat(response, containsString("\"alias\": \"name\"")); } public void indexWithWildcardTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s* LIMIT 1000", - TestsConstants.TEST_INDEX_BANK)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s* LIMIT 1000", TestsConstants.TEST_INDEX_BANK)); Assert.assertTrue(response.has("hits")); assertThat(getTotalHits(response), greaterThan(0)); } @@ -198,8 +192,8 @@ public void selectSpecificFields() throws IOException { Set expectedSource = new HashSet<>(Arrays.asList(arr)); JSONObject response = - executeQuery(String.format(Locale.ROOT, "SELECT age, account_number FROM %s", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format(Locale.ROOT, "SELECT age, account_number FROM %s", TEST_INDEX_ACCOUNT)); assertResponseForSelectSpecificFields(response, expectedSource); } @@ -209,8 +203,9 @@ public void selectSpecificFieldsUsingTableAlias() throws IOException { Set expectedSource = new HashSet<>(Arrays.asList(arr)); JSONObject response = - executeQuery(String.format(Locale.ROOT, "SELECT a.age, a.account_number FROM %s a", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format( + Locale.ROOT, "SELECT a.age, a.account_number FROM %s a", TEST_INDEX_ACCOUNT)); assertResponseForSelectSpecificFields(response, expectedSource); } @@ -219,15 +214,18 @@ public void selectSpecificFieldsUsingTableNamePrefix() throws IOException { String[] arr = new String[] {"age", "account_number"}; Set expectedSource = new HashSet<>(Arrays.asList(arr)); - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT opensearch-sql_test_index_account.age, opensearch-sql_test_index_account.account_number" + - " FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT opensearch-sql_test_index_account.age," + + " opensearch-sql_test_index_account.account_number FROM %s", + TEST_INDEX_ACCOUNT)); assertResponseForSelectSpecificFields(response, expectedSource); } - private void assertResponseForSelectSpecificFields(JSONObject response, - Set expectedSource) { + private void assertResponseForSelectSpecificFields( + JSONObject response, Set expectedSource) { JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); @@ -240,9 +238,12 @@ public void selectFieldWithSpace() throws IOException { String[] arr = new String[] {"test field"}; Set expectedSource = new HashSet<>(Arrays.asList(arr)); - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT ['test field'] FROM %s " + - "WHERE ['test field'] IS NOT null", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT ['test field'] FROM %s WHERE ['test field'] IS NOT null", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { 
@@ -259,19 +260,28 @@ public void selectAliases() throws IOException { String[] arr = new String[] {"myage", "myaccount_number"}; Set expectedSource = new HashSet<>(Arrays.asList(arr)); - JSONObject result = executeQuery(String.format(Locale.ROOT, - "SELECT age AS myage, account_number AS myaccount_number FROM %s", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age AS myage, account_number AS myaccount_number FROM %s", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(result); - hits.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; - Assert.assertEquals(expectedSource, hit.getJSONObject("_source").keySet()); - }); + hits.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; + Assert.assertEquals(expectedSource, hit.getJSONObject("_source").keySet()); + }); } @Test public void useTableAliasInWhereClauseTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s a WHERE a.city = 'Nogal' LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s a WHERE a.city = 'Nogal' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -280,8 +290,12 @@ public void useTableAliasInWhereClauseTest() throws IOException { @Test public void notUseTableAliasInWhereClauseTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s a WHERE city = 'Nogal' LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s a WHERE city = 'Nogal' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -290,10 +304,13 @@ public void notUseTableAliasInWhereClauseTest() throws IOException { @Test public void useTableNamePrefixInWhereClauseTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE opensearch-sql_test_index_account.city = 'Nogal' LIMIT 1000", - TEST_INDEX_ACCOUNT - )); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE opensearch-sql_test_index_account.city = 'Nogal' LIMIT" + + " 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -302,8 +319,12 @@ public void useTableNamePrefixInWhereClauseTest() throws IOException { @Test public void equalityTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE city = 'Nogal' LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE city = 'Nogal' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -312,9 +333,12 @@ public void equalityTest() throws IOException { @Test public void equalityTestPhrase() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s WHERE " + - "match_phrase(phrase, 'quick fox here') LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here') LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); Assert.assertEquals(1, 
getTotalHits(response)); @@ -324,10 +348,13 @@ public void equalityTestPhrase() throws IOException { @Test public void greaterThanTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age > %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age > %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + someAge)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -340,10 +367,13 @@ public void greaterThanTest() throws IOException { @Test public void greaterThanOrEqualTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age >= %s LIMIT 1000", - TEST_INDEX_ACCOUNT, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age >= %s LIMIT 1000", + TEST_INDEX_ACCOUNT, + someAge)); boolean isEqualFound = false; JSONArray hits = getHits(response); @@ -352,24 +382,27 @@ public void greaterThanOrEqualTest() throws IOException { int age = getSource(hit).getInt("age"); assertThat(age, greaterThanOrEqualTo(someAge)); - if (age == someAge) { - isEqualFound = true; - } + if (age == someAge) { + isEqualFound = true; + } } Assert.assertTrue( - String.format(Locale.ROOT, "At least one of the documents need to contains age equal to %s", - someAge), + String.format( + Locale.ROOT, "At least one of the documents need to contains age equal to %s", someAge), isEqualFound); } @Test public void lessThanTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age < %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age < %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + someAge)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -382,10 +415,13 @@ public void lessThanTest() throws IOException { @Test public void lessThanOrEqualTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age <= %s LIMIT 1000", - TEST_INDEX_ACCOUNT, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age <= %s LIMIT 1000", + TEST_INDEX_ACCOUNT, + someAge)); boolean isEqualFound = false; JSONArray hits = getHits(response); @@ -394,32 +430,39 @@ public void lessThanOrEqualTest() throws IOException { int age = getSource(hit).getInt("age"); assertThat(age, lessThanOrEqualTo(someAge)); - if (age == someAge) { - isEqualFound = true; - } + if (age == someAge) { + isEqualFound = true; + } } Assert.assertTrue( - String.format(Locale.ROOT, "At least one of the documents need to contains age equal to %s", - someAge), + String.format( + Locale.ROOT, "At least one of the documents need to contains age equal to %s", someAge), isEqualFound); } @Test public void orTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE match_phrase(gender, 'F') OR match_phrase(gender, 'M') " + - "LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE match_phrase(gender, 'F') OR 
match_phrase(gender, 'M') " + + "LIMIT 1000", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(1000, getTotalHits(response)); } @Test public void andTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age=32 AND gender='M' LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age=32 AND gender='M' LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -431,9 +474,12 @@ public void andTest() throws IOException { @Test public void likeTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE firstname LIKE 'amb%%' LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE firstname LIKE 'amb%%' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -442,9 +488,12 @@ public void likeTest() throws IOException { @Test public void notLikeTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE firstname NOT LIKE 'amb%%'", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE firstname NOT LIKE 'amb%%'", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertNotEquals(0, getTotalHits(response)); @@ -456,11 +505,13 @@ public void notLikeTest() throws IOException { @Test public void regexQueryTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE dog_name = REGEXP_QUERY('sn.*', 'INTERSECTION|COMPLEMENT|EMPTY', 10000)", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE dog_name = REGEXP_QUERY('sn.*'," + + " 'INTERSECTION|COMPLEMENT|EMPTY', 10000)", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -473,11 +524,13 @@ public void regexQueryTest() throws IOException { @Test public void negativeRegexQueryTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE NOT(dog_name = REGEXP_QUERY('sn.*', 'INTERSECTION|COMPLEMENT|EMPTY', 10000))", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT(dog_name = REGEXP_QUERY('sn.*'," + + " 'INTERSECTION|COMPLEMENT|EMPTY', 10000))", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -489,28 +542,36 @@ public void negativeRegexQueryTest() throws IOException { @Test public void doubleNotTest() throws IOException { - JSONObject response1 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND NOT gender LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response1 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND NOT gender LIKE 'f'", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response1)); - JSONObject response2 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND gender NOT LIKE 'f'", - TEST_INDEX_ACCOUNT)); + 
JSONObject response2 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND gender NOT LIKE 'f'", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response2)); - JSONObject response3 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE gender NOT LIKE 'm' AND gender NOT LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response3 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE gender NOT LIKE 'm' AND gender NOT LIKE 'f'", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response3)); - JSONObject response4 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE gender LIKE 'm' AND NOT gender LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response4 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE gender LIKE 'm' AND NOT gender LIKE 'f'", + TEST_INDEX_ACCOUNT)); // Assert there are results and they all have gender 'm' Assert.assertNotEquals(0, getTotalHits(response4)); JSONArray hits = getHits(response4); @@ -519,16 +580,19 @@ public void doubleNotTest() throws IOException { Assert.assertEquals("m", getSource(hit).getString("gender").toLowerCase()); } - JSONObject response5 = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE NOT (gender = 'm' OR gender = 'f')", - TEST_INDEX_ACCOUNT)); + JSONObject response5 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT (gender = 'm' OR gender = 'f')", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response5)); } @Test public void limitTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s LIMIT 30", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s LIMIT 30", TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(30, hits.length()); @@ -538,9 +602,14 @@ public void limitTest() throws IOException { public void betweenTest() throws IOException { int min = 27; int max = 30; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age BETWEEN %s AND %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, min, max)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age BETWEEN %s AND %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + min, + max)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -556,9 +625,14 @@ public void betweenTest() throws IOException { public void notBetweenTest() throws IOException { int min = 20; int max = 37; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age NOT BETWEEN %s AND %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, min, max)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age NOT BETWEEN %s AND %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + min, + max)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -575,9 +649,12 @@ public void notBetweenTest() throws IOException { @Test public void inTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s WHERE age IN (20, 22) LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age FROM %s WHERE age IN (20, 22) LIMIT 1000", + 
TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -589,10 +666,12 @@ public void inTest() throws IOException { @Test public void inTestWithStrings() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT phrase FROM %s WHERE phrase IN ('quick', 'fox') LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT phrase FROM %s WHERE phrase IN ('quick', 'fox') LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -604,12 +683,15 @@ public void inTestWithStrings() throws IOException { @Test public void inTermsTestWithIdentifiersTreatedLikeStrings() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(2, getTotalHits(response)); @@ -622,12 +704,15 @@ public void inTermsTestWithIdentifiersTreatedLikeStrings() throws IOException { @Test public void inTermsTestWithStrings() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(2, getTotalHits(response)); @@ -640,12 +725,15 @@ public void inTermsTestWithStrings() throws IOException { @Test public void inTermsWithNumbers() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.ofHisName = IN_TERMS(4,2) " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.ofHisName = IN_TERMS(4,2) " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -657,10 +745,12 @@ public void inTermsWithNumbers() throws IOException { @Test public void termQueryWithNumber() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT name FROM %s WHERE name.ofHisName = term(4) LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name FROM %s WHERE name.ofHisName = term(4) LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -672,12 +762,15 @@ public void termQueryWithNumber() throws IOException { @Test public void termQueryWithStringIdentifier() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - 
"WHERE name.firstname = term('brandon') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = term('brandon') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -689,12 +782,15 @@ public void termQueryWithStringIdentifier() throws IOException { @Test public void termQueryWithStringLiteral() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = term('brandon') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = term('brandon') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -708,9 +804,12 @@ public void termQueryWithStringLiteral() throws IOException { // are returned as well. This may be incorrect behavior. @Test public void notInTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s WHERE age NOT IN (20, 22) LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age FROM %s WHERE age NOT IN (20, 22) LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -730,9 +829,12 @@ public void dateSearch() throws IOException { DateTimeFormatter formatter = DateTimeFormat.forPattern(TestsConstants.DATE_FORMAT); DateTime dateToCompare = new DateTime(2014, 8, 18, 0, 0, 0); - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT insert_time FROM %s WHERE insert_time < '2014-08-18'", - TestsConstants.TEST_INDEX_ONLINE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time FROM %s WHERE insert_time < '2014-08-18'", + TestsConstants.TEST_INDEX_ONLINE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); @@ -740,8 +842,8 @@ public void dateSearch() throws IOException { DateTime insertTime = formatter.parseDateTime(source.getString("insert_time")); String errorMessage = - String.format(Locale.ROOT, "insert_time must be before 2014-08-18. Found: %s", - insertTime); + String.format( + Locale.ROOT, "insert_time must be before 2014-08-18. 
Found: %s", insertTime); Assert.assertTrue(errorMessage, insertTime.isBefore(dateToCompare)); } } @@ -751,10 +853,12 @@ public void dateSearchBraces() throws IOException { DateTimeFormatter formatter = DateTimeFormat.forPattern(TestsConstants.TS_DATE_FORMAT); DateTime dateToCompare = new DateTime(2015, 3, 15, 0, 0, 0); - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT odbc_time FROM %s WHERE odbc_time < {ts '2015-03-15 00:00:00.000'}", - TestsConstants.TEST_INDEX_ODBC)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT odbc_time FROM %s WHERE odbc_time < {ts '2015-03-15 00:00:00.000'}", + TestsConstants.TEST_INDEX_ODBC)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); @@ -764,8 +868,8 @@ public void dateSearchBraces() throws IOException { DateTime insertTime = formatter.parseDateTime(insertTimeStr); String errorMessage = - String.format(Locale.ROOT, "insert_time must be before 2015-03-15. Found: %s", - insertTime); + String.format( + Locale.ROOT, "insert_time must be before 2015-03-15. Found: %s", insertTime); Assert.assertTrue(errorMessage, insertTime.isBefore(dateToCompare)); } } @@ -777,20 +881,24 @@ public void dateBetweenSearch() throws IOException { DateTime dateLimit1 = new DateTime(2014, 8, 18, 0, 0, 0); DateTime dateLimit2 = new DateTime(2014, 8, 21, 0, 0, 0); - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT insert_time " + - "FROM %s " + - "WHERE insert_time BETWEEN '2014-08-18' AND '2014-08-21' " + - "LIMIT 3", - TestsConstants.TEST_INDEX_ONLINE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time " + + "FROM %s " + + "WHERE insert_time BETWEEN '2014-08-18' AND '2014-08-21' " + + "LIMIT 3", + TestsConstants.TEST_INDEX_ONLINE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); JSONObject source = getSource(hit); DateTime insertTime = formatter.parseDateTime(source.getString("insert_time")); - boolean isBetween = (insertTime.isAfter(dateLimit1) || insertTime.isEqual(dateLimit1)) && - (insertTime.isBefore(dateLimit2) || insertTime.isEqual(dateLimit2)); + boolean isBetween = + (insertTime.isAfter(dateLimit1) || insertTime.isEqual(dateLimit1)) + && (insertTime.isBefore(dateLimit2) || insertTime.isEqual(dateLimit2)); Assert.assertTrue("insert_time must be between 2014-08-18 and 2014-08-21", isBetween); } @@ -798,9 +906,12 @@ public void dateBetweenSearch() throws IOException { @Test public void missFilterSearch() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE insert_time2 IS missing", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE insert_time2 IS missing", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); Assert.assertEquals(4, getTotalHits(response)); @@ -814,9 +925,12 @@ public void missFilterSearch() throws IOException { @Test public void notMissFilterSearch() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE insert_time2 IS NOT missing", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE insert_time2 IS NOT missing", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); 
Assert.assertEquals(2, getTotalHits(response)); @@ -830,15 +944,19 @@ public void notMissFilterSearch() throws IOException { @Test public void complexConditionQuery() throws IOException { - String errorMessage = "Result does not exist to the condition " + - "(gender='m' AND (age> 25 OR account_number>5)) OR (gender='f' AND (age>30 OR account_number < 8)"; + String errorMessage = + "Result does not exist to the condition (gender='m' AND (age> 25 OR account_number>5)) OR" + + " (gender='f' AND (age>30 OR account_number < 8)"; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE (gender='m' AND (age> 25 OR account_number>5)) " + - "OR (gender='f' AND (age>30 OR account_number < 8))", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE (gender='m' AND (age> 25 OR account_number>5)) " + + "OR (gender='f' AND (age>30 OR account_number < 8))", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -849,7 +967,8 @@ public void complexConditionQuery() throws IOException { int age = source.getInt("age"); int accountNumber = source.getInt("account_number"); - Assert.assertTrue(errorMessage, + Assert.assertTrue( + errorMessage, (gender.equals("m") && (age > 25 || accountNumber > 5)) || (gender.equals("f") && (age > 30 || accountNumber < 8))); } @@ -857,16 +976,20 @@ public void complexConditionQuery() throws IOException { @Test public void complexNotConditionQuery() throws IOException { - String errorMessage = "Result does not exist to the condition " + - "NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + - "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))"; + String errorMessage = + "Result does not exist to the condition " + + "NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + + "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))"; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + - "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + + "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertNotEquals(0, hits.length()); @@ -878,7 +1001,8 @@ public void complexNotConditionQuery() throws IOException { int age = source.getInt("age"); int accountNumber = source.getInt("account_number"); - Assert.assertTrue(errorMessage, + Assert.assertTrue( + errorMessage, !(gender.equals("m") && !(age > 25 || accountNumber > 5)) || (!gender.equals("f") && !(age > 30 || accountNumber < 8))); } @@ -887,9 +1011,10 @@ public void complexNotConditionQuery() throws IOException { @Test @SuppressWarnings("unchecked") public void orderByAscTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s ORDER BY age ASC LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT age FROM %s ORDER BY age ASC LIMIT 1000", TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); ArrayList ages = new ArrayList<>(); @@ -907,17 +1032,23 @@ public void orderByAscTest() throws IOException { @Test public 
void orderByDescTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s ORDER BY age DESC LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age FROM %s ORDER BY age DESC LIMIT 1000", + TEST_INDEX_ACCOUNT)); assertResponseForOrderByTest(response); } @Test public void orderByDescUsingTableAliasTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT a.age FROM %s a ORDER BY a.age DESC LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT a.age FROM %s a ORDER BY a.age DESC LIMIT 1000", + TEST_INDEX_ACCOUNT)); assertResponseForOrderByTest(response); } @@ -940,13 +1071,16 @@ private void assertResponseForOrderByTest(JSONObject response) { @Test @SuppressWarnings("unchecked") public void orderByAscFieldWithSpaceTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE `test field` IS NOT null " + - "ORDER BY `test field` ASC " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE `test field` IS NOT null " + + "ORDER BY `test field` ASC " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); ArrayList testFields = new ArrayList<>(); @@ -964,195 +1098,175 @@ public void orderByAscFieldWithSpaceTest() throws IOException { @Test public void testWhereWithBoolEqualsTrue() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = true LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolEqualsTrueAndGroupBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = true GROUP BY balance LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolEqualsTrueAndOrderBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true " + - "ORDER BY age " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = true ORDER BY age LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolIsTrue() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS true " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male IS true GROUP BY balance LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolIsNotTrue() throws 
IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS NOT true " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + + "FROM %s " + + "WHERE male IS NOT true " + + "GROUP BY balance " + + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolEqualsFalse() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = false " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = false LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolEqualsFalseAndGroupBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = false " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = false GROUP BY balance LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolEqualsFalseAndOrderBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = false " + - "ORDER BY age " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = false ORDER BY age LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolIsFalse() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS false " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male IS false GROUP BY balance LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolIsNotFalse() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS NOT false " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + + "FROM %s " + + "WHERE male IS NOT false " + + "GROUP BY balance " + + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testMultiPartWhere() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE (firstname LIKE 'opal' OR firstname LIKE 'rodriquez') " + - "AND (state like 'oh' OR state like 'hi')", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE (firstname LIKE 'opal' OR firstname LIKE 'rodriquez') " + + "AND (state like 'oh' OR state like 'hi')", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(2, getTotalHits(response)); } @Test public void testMultiPartWhere2() throws IOException 
{ - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE ((account_number > 200 AND account_number < 300) OR gender LIKE 'm') " + - "AND (state LIKE 'hi' OR address LIKE 'avenue')", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE ((account_number > 200 AND account_number < 300) OR gender LIKE 'm') " + + "AND (state LIKE 'hi' OR address LIKE 'avenue')", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(127, getTotalHits(response)); } @Test public void testMultiPartWhere3() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE ((account_number > 25 AND account_number < 75) AND age >35 ) " + - "AND (state LIKE 'md' OR (address LIKE 'avenue' OR address LIKE 'street'))", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE ((account_number > 25 AND account_number < 75) AND age >35 ) " + + "AND (state LIKE 'md' OR (address LIKE 'avenue' OR address LIKE 'street'))", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(7, getTotalHits(response)); } @Test public void filterPolygonTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE GEO_INTERSECTS(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE GEO_INTERSECTS(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1163,10 +1277,12 @@ public void filterPolygonTest() throws IOException { @Test public void boundingBox() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE GEO_BOUNDING_BOX(center, 100.0, 1.0, 101, 0.0)", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE GEO_BOUNDING_BOX(center, 100.0, 1.0, 101, 0.0)", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1177,10 +1293,12 @@ public void boundingBox() throws IOException { @Test public void geoDistance() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE GEO_DISTANCE(center, '1km', 100.5, 0.500001)", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE GEO_DISTANCE(center, '1km', 100.5, 0.500001)", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1191,10 +1309,12 @@ public void geoDistance() throws IOException { @Test public void geoPolygon() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE GEO_POLYGON(center, 100,0, 100.5, 2, 101.0,0)", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE GEO_POLYGON(center, 100,0, 100.5, 2, 101.0,0)", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ 
-1206,36 +1326,45 @@ public void geoPolygon() throws IOException { @Ignore @Test public void escapedCharactersCheck() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE MATCH_PHRASE(nickname, 'Daenerys \"Stormborn\"') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE MATCH_PHRASE(nickname, 'Daenerys \"Stormborn\"') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void complexObjectSearch() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE MATCH_PHRASE(name.firstname, 'Jaime') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE MATCH_PHRASE(name.firstname, 'Jaime') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void complexObjectReturnField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT parents.father " + - "FROM %s " + - "WHERE MATCH_PHRASE(name.firstname, 'Brandon') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT parents.father " + + "FROM %s " + + "WHERE MATCH_PHRASE(name.firstname, 'Brandon') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1246,14 +1375,18 @@ public void complexObjectReturnField() throws IOException { /** * TODO: Fields prefixed with @ gets converted to SQLVariantRefExpr instead of SQLIdentifierExpr - * Either change SQLVariantRefExpr to SQLIdentifierExpr - * Or handle the special case for SQLVariantRefExpr + * Either change SQLVariantRefExpr to SQLIdentifierExpr Or handle the special case for + * SQLVariantRefExpr */ @Ignore @Test public void queryWithAtFieldOnWhere() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s where @wolf = 'Summer' LIMIT 1000", TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s where @wolf = 'Summer' LIMIT 1000", + TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); JSONObject hit = getHits(response).getJSONObject(0); Assert.assertEquals("Summer", hit.get("@wolf")); @@ -1265,19 +1398,22 @@ public void queryWithDotAtStartOfIndexName() throws Exception { TestUtils.createHiddenIndexByRestClient(client(), ".bank", null); TestUtils.loadDataByRestClient(client(), ".bank", "/src/test/resources/.bank.json"); - String response = executeQuery("SELECT education FROM .bank WHERE account_number = 12345", - "jdbc"); + String response = + executeQuery("SELECT education FROM .bank WHERE account_number = 12345", "jdbc"); Assert.assertTrue(response.contains("PhD")); } @Test public void notLikeTests() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname NOT LIKE 'd%%' AND name IS NOT NULL " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( 
+ Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname NOT LIKE 'd%%' AND name IS NOT NULL " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(3, getTotalHits(response)); @@ -1286,45 +1422,49 @@ public void notLikeTests() throws IOException { JSONObject source = getSource(hit); String name = source.getJSONObject("name").getString("firstname"); - Assert - .assertFalse(String.format(Locale.ROOT, "Name [%s] should not match pattern [d%%]", name), - name.startsWith("d")); + Assert.assertFalse( + String.format(Locale.ROOT, "Name [%s] should not match pattern [d%%]", name), + name.startsWith("d")); } } @Test public void isNullTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE nickname IS NULL " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name FROM %s WHERE nickname IS NULL LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(6, getTotalHits(response)); } @Test public void isNotNullTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE nickname IS NOT NULL " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name FROM %s WHERE nickname IS NOT NULL LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void innerQueryTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s D " + - "WHERE holdersName IN (SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Hattie')", - TestsConstants.TEST_INDEX_DOG, TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s D " + + "WHERE holdersName IN (SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Hattie')", + TestsConstants.TEST_INDEX_DOG, + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1339,19 +1479,22 @@ public void innerQueryTest() throws IOException { @Ignore @Test public void twoSubQueriesTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE holdersName IN (SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Hattie') " + - "AND age IN (SELECT name.ofHisName " + - "FROM %s " + - "WHERE name.firstname <> 'Daenerys' " + - "AND name.ofHisName IS NOT NULL) ", - TestsConstants.TEST_INDEX_DOG, - TEST_INDEX_ACCOUNT, - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE holdersName IN (SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Hattie') " + + "AND age IN (SELECT name.ofHisName " + + "FROM %s " + + "WHERE name.firstname <> 'Daenerys' " + + "AND name.ofHisName IS NOT NULL) ", + TestsConstants.TEST_INDEX_DOG, + TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1366,14 +1509,18 @@ public void twoSubQueriesTest() throws IOException { @Ignore @Test public void inTermsSubQueryTest() throws IOException { - JSONObject response = executeQuery( - 
String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE age = IN_TERMS (SELECT name.ofHisName " + - "FROM %s " + - "WHERE name.firstname <> 'Daenerys' " + - "AND name.ofHisName IS NOT NULL)", - TestsConstants.TEST_INDEX_DOG, TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE age = IN_TERMS (SELECT name.ofHisName " + + "FROM %s " + + "WHERE name.firstname <> 'Daenerys' " + + "AND name.ofHisName IS NOT NULL)", + TestsConstants.TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1388,9 +1535,12 @@ public void inTermsSubQueryTest() throws IOException { @Ignore @Test public void idsQueryOneId() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1)", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1)", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1405,9 +1555,12 @@ public void idsQueryOneId() throws IOException { @Ignore @Test public void idsQueryMultipleId() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1, 2, 3)", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1, 2, 3)", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1422,14 +1575,18 @@ public void idsQueryMultipleId() throws IOException { @Ignore @Test public void idsQuerySubQueryIds() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE _id = IDS_QUERY(dog, (SELECT name.ofHisName " + - "FROM %s " + - "WHERE name.firstname <> 'Daenerys' " + - "AND name.ofHisName IS NOT NULL))", - TestsConstants.TEST_INDEX_DOG, TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE _id = IDS_QUERY(dog, (SELECT name.ofHisName " + + "FROM %s " + + "WHERE name.firstname <> 'Daenerys' " + + "AND name.ofHisName IS NOT NULL))", + TestsConstants.TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1443,18 +1600,24 @@ public void idsQuerySubQueryIds() throws IOException { @Test public void nestedEqualsTestFieldNormalField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE nested(message.info)='b'", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE nested(message.info)='b'", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void nestedEqualsTestFieldInsideArrays() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE nested(message.info) = 'a'", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE nested(message.info) = 'a'", + 
TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(2, getTotalHits(response)); } @@ -1462,106 +1625,124 @@ public void nestedEqualsTestFieldInsideArrays() throws IOException { @Ignore // Seems like we don't support nested with IN, throwing IllegalArgumentException @Test public void nestedOnInQuery() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s where nested(message.info) IN ('a','b')", TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s where nested(message.info) IN ('a','b')", + TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(3, getTotalHits(response)); } @Test public void complexNestedQueryBothOnSameObject() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE nested('message', message.info = 'a' AND message.author ='i')", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE nested('message', message.info = 'a' AND message.author ='i')", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void complexNestedQueryNotBothOnSameObject() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE nested('message', message.info = 'a' AND message.author ='h')", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE nested('message', message.info = 'a' AND message.author ='h')", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(0, getTotalHits(response)); } @Test public void nestedOnInTermsQuery() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE nested(message.info) = IN_TERMS('a', 'b')", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE nested(message.info) = IN_TERMS('a', 'b')", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(3, getTotalHits(response)); } // TODO Uncomment these after problem with loading join index is resolved -// @Test -// public void childrenEqualsTestFieldNormalField() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info) = 'b'", TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(1, getTotalHits(response)); -// } -// -// @Test -// public void childrenOnInQuery() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info) IN ('a', 'b')", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(2, getTotalHits(response)); -// } -// -// @Test -// public void complexChildrenQueryBothOnSameObject() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info = 'a' AND author ='e')", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(1, getTotalHits(response)); -// } -// -// @Test -// public void complexChildrenQueryNotOnSameObject() throws IOException { -// JSONObject response = 
executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info = 'a' AND author ='j')", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(0, getTotalHits(response)); -// } -// -// @Test -// public void childrenOnInTermsQuery() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info) = IN_TERMS(a, b)", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(2, getTotalHits(response)); -// } + // @Test + // public void childrenEqualsTestFieldNormalField() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info) = 'b'", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(1, getTotalHits(response)); + // } + // + // @Test + // public void childrenOnInQuery() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info) IN ('a', 'b')", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(2, getTotalHits(response)); + // } + // + // @Test + // public void complexChildrenQueryBothOnSameObject() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info = 'a' AND author + // ='e')", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(1, getTotalHits(response)); + // } + // + // @Test + // public void complexChildrenQueryNotOnSameObject() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info = 'a' AND author + // ='j')", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(0, getTotalHits(response)); + // } + // + // @Test + // public void childrenOnInTermsQuery() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info) = IN_TERMS(a, b)", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(2, getTotalHits(response)); + // } @Ignore // the hint does not really work, NoSuchIndexException is thrown @Test public void multipleIndicesOneNotExistWithHint() throws IOException { - JSONObject response = executeQuery(String - .format(Locale.ROOT, "SELECT /*! IGNORE_UNAVAILABLE */ * FROM %s,%s ", TEST_INDEX_ACCOUNT, - "badindex")); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT /*! 
IGNORE_UNAVAILABLE */ * FROM %s,%s ", + TEST_INDEX_ACCOUNT, + "badindex")); Assert.assertTrue(getTotalHits(response) > 0); } @@ -1573,8 +1754,8 @@ public void multipleIndicesOneNotExistWithoutHint() throws IOException { String.format(Locale.ROOT, "SELECT * FROM %s, %s", TEST_INDEX_ACCOUNT, "badindex")); Assert.fail("Expected exception, but call succeeded"); } catch (ResponseException e) { - Assert.assertEquals(RestStatus.BAD_REQUEST.getStatus(), - e.getResponse().getStatusLine().getStatusCode()); + Assert.assertEquals( + RestStatus.BAD_REQUEST.getStatus(), e.getResponse().getStatusLine().getStatusCode()); final String entity = TestUtils.getResponseBody(e.getResponse()); Assert.assertThat(entity, containsString("\"type\": \"IndexNotFoundException\"")); } @@ -1582,29 +1763,36 @@ public void multipleIndicesOneNotExistWithoutHint() throws IOException { // TODO Find way to check routing() without SearchRequestBuilder // to properly update these tests to OpenSearchIntegTestCase format -// @Test -// public void routingRequestOneRounting() throws IOException { -// SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, -// "SELECT /*! ROUTINGS(hey) */ * FROM %s ", TEST_INDEX_ACCOUNT)); -// SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); -// Assert.assertEquals("hey",searchRequestBuilder.request().routing()); -// } -// -// @Test -// public void routingRequestMultipleRountings() throws IOException { -// SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, -// "SELECT /*! ROUTINGS(hey,bye) */ * FROM %s ", TEST_INDEX_ACCOUNT)); -// SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); -// Assert.assertEquals("hey,bye",searchRequestBuilder.request().routing()); -// } + // @Test + // public void routingRequestOneRounting() throws IOException { + // SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, + // "SELECT /*! ROUTINGS(hey) */ * FROM %s ", + // TEST_INDEX_ACCOUNT)); + // SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); + // Assert.assertEquals("hey",searchRequestBuilder.request().routing()); + // } + // + // @Test + // public void routingRequestMultipleRountings() throws IOException { + // SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, + // "SELECT /*! 
ROUTINGS(hey,bye) */ * FROM %s ", + // TEST_INDEX_ACCOUNT)); + // SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); + // Assert.assertEquals("hey,bye",searchRequestBuilder.request().routing()); + // } @Ignore // Getting parser error: syntax error, expect RPAREN, actual IDENTIFIER insert_time @Test public void scriptFilterNoParams() throws IOException { - JSONObject result = executeQuery(String.format(Locale.ROOT, - "SELECT insert_time FROM %s where script('doc[\\'insert_time\''].date.hourOfDay==16') " + - "and insert_time <'2014-08-21T00:00:00.000Z'", TEST_INDEX_ONLINE)); + JSONObject result = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time FROM %s where" + + " script('doc[\\'insert_time\''].date.hourOfDay==16') and insert_time" + + " <'2014-08-21T00:00:00.000Z'", + TEST_INDEX_ONLINE)); Assert.assertEquals(237, getTotalHits(result)); } @@ -1612,20 +1800,28 @@ public void scriptFilterNoParams() throws IOException { @Test public void scriptFilterWithParams() throws IOException { - JSONObject result = executeQuery(String.format(Locale.ROOT, - "SELECT insert_time FROM %s where script('doc[\\'insert_time\''].date.hourOfDay==x','x'=16) " + - "and insert_time <'2014-08-21T00:00:00.000Z'", TEST_INDEX_ONLINE)); + JSONObject result = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time FROM %s where" + + " script('doc[\\'insert_time\''].date.hourOfDay==x','x'=16) and insert_time" + + " <'2014-08-21T00:00:00.000Z'", + TEST_INDEX_ONLINE)); Assert.assertEquals(237, getTotalHits(result)); } @Test public void highlightPreTagsAndPostTags() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT /*! HIGHLIGHT(phrase, pre_tags : [''], post_tags : ['']) */ " + - "* FROM %s " + - "WHERE phrase LIKE 'fox' " + - "ORDER BY _score", TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT /*! HIGHLIGHT(phrase, pre_tags : [''], post_tags : ['']) */ " + + "* FROM %s " + + "WHERE phrase LIKE 'fox' " + + "ORDER BY _score", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -1640,13 +1836,17 @@ public void highlightPreTagsAndPostTags() throws IOException { @Ignore @Test public void fieldCollapsingTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT /*! COLLAPSE({\"field\":\"age\"," + - "\"inner_hits\":{\"name\": \"account\"," + - "\"size\":1," + - "\"sort\":[{\"age\":\"asc\"}]}," + - "\"max_concurrent_group_searches\": 4}) */ " + - "* FROM %s", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT /*! 
COLLAPSE({\"field\":\"age\"," + + "\"inner_hits\":{\"name\": \"account\"," + + "\"size\":1," + + "\"sort\":[{\"age\":\"asc\"}]}," + + "\"max_concurrent_group_searches\": 4}) */ " + + "* FROM %s", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(21, hits.length()); @@ -1656,8 +1856,8 @@ public void fieldCollapsingTest() throws IOException { @Test public void backticksQuotedIndexNameTest() throws Exception { TestUtils.createIndexByRestClient(client(), "bank_unquote", null); - TestUtils - .loadDataByRestClient(client(), "bank", "/src/test/resources/bank_for_unquote_test.json"); + TestUtils.loadDataByRestClient( + client(), "bank", "/src/test/resources/bank_for_unquote_test.json"); JSONArray hits = getHits(executeQuery("SELECT lastname FROM `bank`")); Object responseIndex = ((JSONObject) hits.get(0)).query("/_index"); @@ -1665,39 +1865,57 @@ public void backticksQuotedIndexNameTest() throws Exception { assertEquals( executeQuery("SELECT lastname FROM bank", "jdbc"), - executeQuery("SELECT `bank`.`lastname` FROM `bank`", "jdbc") - ); + executeQuery("SELECT `bank`.`lastname` FROM `bank`", "jdbc")); assertEquals( executeQuery( - "SELECT `b`.`age` AS `AGE`, AVG(`b`.`balance`) FROM `bank` AS `b` " + - "WHERE ABS(`b`.`age`) > 20 GROUP BY `b`.`age` ORDER BY `b`.`age`", + "SELECT `b`.`age` AS `AGE`, AVG(`b`.`balance`) FROM `bank` AS `b` " + + "WHERE ABS(`b`.`age`) > 20 GROUP BY `b`.`age` ORDER BY `b`.`age`", "jdbc"), - executeQuery("SELECT b.age AS AGE, AVG(balance) FROM bank AS b " + - "WHERE ABS(age) > 20 GROUP BY b.age ORDER BY b.age", - "jdbc") - ); + executeQuery( + "SELECT b.age AS AGE, AVG(balance) FROM bank AS b " + + "WHERE ABS(age) > 20 GROUP BY b.age ORDER BY b.age", + "jdbc")); } @Test public void backticksQuotedFieldNamesTest() { - String expected = executeQuery(StringUtils.format("SELECT b.lastname FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); - String quotedFieldResult = executeQuery(StringUtils.format("SELECT b.`lastname` FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT b.lastname FROM %s AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); + String quotedFieldResult = + executeQuery( + StringUtils.format( + "SELECT b.`lastname` FROM %s AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); assertEquals(expected, quotedFieldResult); } @Test public void backticksQuotedAliasTest() { - String expected = executeQuery(StringUtils.format("SELECT b.lastname FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); - String quotedAliasResult = executeQuery(StringUtils.format("SELECT `b`.lastname FROM %s" + - " AS `b` ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT b.lastname FROM %s AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); + String quotedAliasResult = + executeQuery( + StringUtils.format( + "SELECT `b`.lastname FROM %s AS `b` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); String quotedAliasAndFieldResult = - executeQuery(StringUtils.format("SELECT `b`.`lastname` FROM %s " + - "AS `b` ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS `b` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); assertEquals(expected, 
quotedAliasResult); assertEquals(expected, quotedAliasAndFieldResult); @@ -1705,19 +1923,28 @@ public void backticksQuotedAliasTest() { @Test public void backticksQuotedAliasWithSpecialCharactersTest() { - String expected = executeQuery(StringUtils.format("SELECT b.lastname FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT b.lastname FROM %s AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); String specialCharAliasResult = - executeQuery(StringUtils.format("SELECT `b k`.lastname FROM %s " + - "AS `b k` ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + executeQuery( + StringUtils.format( + "SELECT `b k`.lastname FROM %s AS `b k` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); assertEquals(expected, specialCharAliasResult); } @Test public void backticksQuotedAliasInJDBCResponseTest() { - String query = StringUtils.format("SELECT `b`.`lastname` AS `name` FROM %s AS `b` " + - "ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK); + String query = + StringUtils.format( + "SELECT `b`.`lastname` AS `name` FROM %s AS `b` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK); String response = executeQuery(query, "jdbc"); assertTrue(response.contains("\"alias\": \"name\"")); @@ -1725,10 +1952,14 @@ public void backticksQuotedAliasInJDBCResponseTest() { @Test public void caseWhenSwitchTest() throws IOException { - JSONObject response = executeQuery("SELECT CASE age " + - "WHEN 30 THEN '1' " + - "WHEN 40 THEN '2' " + - "ELSE '0' END AS cases FROM " + TEST_INDEX_ACCOUNT + " WHERE age IS NOT NULL"); + JSONObject response = + executeQuery( + "SELECT CASE age " + + "WHEN 30 THEN '1' " + + "WHEN 40 THEN '2' " + + "ELSE '0' END AS cases FROM " + + TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL"); JSONObject hit = getHits(response).getJSONObject(0); String age = hit.query("/_source/age").toString(); String cases = age.equals("30") ? "1" : age.equals("40") ? 
"2" : "0"; @@ -1738,49 +1969,61 @@ public void caseWhenSwitchTest() throws IOException { @Test public void caseWhenJdbcResponseTest() { - String response = executeQuery("SELECT CASE age " + - "WHEN 30 THEN 'age is 30' " + - "WHEN 40 THEN 'age is 40' " + - "ELSE 'NA' END AS cases FROM " + TEST_INDEX_ACCOUNT + " WHERE age is not null", "jdbc"); + String response = + executeQuery( + "SELECT CASE age " + + "WHEN 30 THEN 'age is 30' " + + "WHEN 40 THEN 'age is 40' " + + "ELSE 'NA' END AS cases FROM " + + TEST_INDEX_ACCOUNT + + " WHERE age is not null", + "jdbc"); assertTrue( - response.contains("age is 30") || - response.contains("age is 40") || - response.contains("NA") - ); + response.contains("age is 30") + || response.contains("age is 40") + || response.contains("NA")); } @Ignore("This is already supported in new SQL engine") @Test public void functionInCaseFieldShouldThrowESExceptionDueToIllegalScriptInJdbc() { - String response = executeQuery( - "select case lower(firstname) when 'amber' then '1' else '2' end as cases from " + - TEST_INDEX_ACCOUNT, - "jdbc"); - queryInJdbcResponseShouldIndicateESException(response, "SearchPhaseExecutionException", + String response = + executeQuery( + "select case lower(firstname) when 'amber' then '1' else '2' end as cases from " + + TEST_INDEX_ACCOUNT, + "jdbc"); + queryInJdbcResponseShouldIndicateESException( + response, + "SearchPhaseExecutionException", "For more details, please send request for Json format"); } @Ignore("This is already supported in our new query engine") @Test public void functionCallWithIllegalScriptShouldThrowESExceptionInJdbc() { - String response = executeQuery("select log(balance + 2) from " + TEST_INDEX_BANK, - "jdbc"); - queryInJdbcResponseShouldIndicateESException(response, "SearchPhaseExecutionException", + String response = executeQuery("select log(balance + 2) from " + TEST_INDEX_BANK, "jdbc"); + queryInJdbcResponseShouldIndicateESException( + response, + "SearchPhaseExecutionException", "please send request for Json format to see the raw response from OpenSearch engine."); } - @Ignore("Goes in different route, does not call PrettyFormatRestExecutor.execute methods." + - "The performRequest method in RestClient doesn't throw any exceptions for null value fields in script") + @Ignore( + "Goes in different route, does not call PrettyFormatRestExecutor.execute methods.The" + + " performRequest method in RestClient doesn't throw any exceptions for null value" + + " fields in script") @Test public void functionArgWithNullValueFieldShouldThrowESExceptionInJdbc() { - String response = executeQuery( - "select log(balance) from " + TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc"); - queryInJdbcResponseShouldIndicateESException(response, "SearchPhaseExecutionException", + String response = + executeQuery("select log(balance) from " + TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc"); + queryInJdbcResponseShouldIndicateESException( + response, + "SearchPhaseExecutionException", "For more details, please send request for Json format"); } - private void queryInJdbcResponseShouldIndicateESException(String response, String exceptionType, - String... errMsgs) { + private void queryInJdbcResponseShouldIndicateESException( + String response, String exceptionType, String... 
errMsgs) { Assert.assertThat(response, containsString(exceptionType)); for (String errMsg : errMsgs) { Assert.assertThat(response, containsString(errMsg)); @@ -1803,9 +2046,21 @@ private void checkAggregationResponseSize(JSONObject response, int sizeCheck) { private void checkSelectAllAndFieldResponseSize(JSONObject response) { String[] arr = - new String[] {"account_number", "firstname", "address", "birthdate", "gender", "city", - "lastname", - "balance", "employer", "state", "age", "email", "male"}; + new String[] { + "account_number", + "firstname", + "address", + "birthdate", + "gender", + "city", + "lastname", + "balance", + "employer", + "state", + "age", + "email", + "male" + }; Set expectedSource = new HashSet<>(Arrays.asList(arr)); JSONArray hits = getHits(response); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java index dd48d82114..a94047c1e4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.core.common.Strings.isNullOrEmpty; @@ -49,6 +48,9 @@ import org.opensearch.core.xcontent.XContentBuilder; /** + * + * + *
  * SQL plugin integration test base class (migrated from SQLIntegTestCase)
  * 
 * The execution of order is as follows: @@ -60,6 +62,7 @@ * XXXTIT: 3) init() 5) init() *
 * TODO: this base class should extends ODFERestTestCase + *
*/ public abstract class RestIntegTestCase extends OpenSearchSQLRestTestCase { @@ -78,9 +81,9 @@ protected boolean preserveClusterUponCompletion() { } /** - * We need to be able to dump the jacoco coverage before cluster is shut down. - * The new internal testing framework removed some of the gradle tasks we were listening to - * to choose a good time to do it. This will dump the executionData to file after each test. + * We need to be able to dump the jacoco coverage before cluster is shut down. The new internal + * testing framework removed some of the gradle tasks we were listening to to choose a good time + * to do it. This will dump the executionData to file after each test.
* TODO: This is also currently just overwriting integTest.exec with the updated execData without * resetting after writing each time. This can be improved to either write an exec file per test * or by letting jacoco append to the file @@ -104,10 +107,12 @@ public static void dumpCoverage() { String serverUrl = "service:jmx:rmi:///jndi/rmi://127.0.0.1:7777/jmxrmi"; try (JMXConnector connector = JMXConnectorFactory.connect(new JMXServiceURL(serverUrl))) { - IProxy proxy = MBeanServerInvocationHandler.newProxyInstance( - connector.getMBeanServerConnection(), new ObjectName("org.jacoco:type=Runtime"), - IProxy.class, - false); + IProxy proxy = + MBeanServerInvocationHandler.newProxyInstance( + connector.getMBeanServerConnection(), + new ObjectName("org.jacoco:type=Runtime"), + IProxy.class, + false); Path path = Paths.get(jacocoBuildPath + "/integTest.exec"); Files.write(path, proxy.getExecutionData(false)); @@ -117,9 +122,9 @@ public static void dumpCoverage() { } /** - * As JUnit JavaDoc says: - * "The @AfterClass methods declared in superclasses will be run after those of the current class." - * So this method is supposed to run before closeClients() in parent class. + * As JUnit JavaDoc says:
+ * "The @AfterClass methods declared in superclasses will be run after those of the current + * class." So this method is supposed to run before closeClients() in parent class. */ @AfterClass public static void cleanUpIndices() throws IOException { @@ -128,8 +133,8 @@ public static void cleanUpIndices() throws IOException { } /** - * Make it thread-safe in case tests are running in parallel but does not guarantee - * if test like DeleteIT that mutates cluster running in parallel. + * Make it thread-safe in case tests are running in parallel but does not guarantee if test like + * DeleteIT that mutates cluster running in parallel. */ protected synchronized void loadIndex(Index index) throws IOException { String indexName = index.getName(); @@ -142,11 +147,8 @@ protected synchronized void loadIndex(Index index) throws IOException { } } - /** - * Provide for each test to load test index, data and other setup work - */ - protected void init() throws Exception { - } + /** Provide for each test to load test index, data and other setup work */ + protected void init() throws Exception {} protected static void updateClusterSetting(String settingKey, Object value) throws IOException { updateClusterSetting(settingKey, value, true); @@ -155,18 +157,18 @@ protected static void updateClusterSetting(String settingKey, Object value) thro protected static void updateClusterSetting(String settingKey, Object value, boolean persistent) throws IOException { String property = persistent ? PERSISTENT : TRANSIENT; - XContentBuilder builder = XContentFactory - .jsonBuilder() - .startObject() - .startObject(property) - .field(settingKey, value) - .endObject() - .endObject(); + XContentBuilder builder = + XContentFactory.jsonBuilder() + .startObject() + .startObject(property) + .field(settingKey, value) + .endObject() + .endObject(); Request request = new Request("PUT", "_cluster/settings"); request.setJsonEntity(builder.toString()); Response response = client().performRequest(request); - Assert - .assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + Assert.assertEquals( + RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); } protected static void wipeAllClusterSettings() throws IOException { @@ -174,103 +176,109 @@ protected static void wipeAllClusterSettings() throws IOException { updateClusterSetting("*", null, false); } - /** - * Enum for associating test index with relevant mapping and data. - */ + /** Enum for associating test index with relevant mapping and data. 
*/ public enum Index { - ONLINE(TestsConstants.TEST_INDEX_ONLINE, - "online", - null, - "src/test/resources/online.json"), - ACCOUNT(TestsConstants.TEST_INDEX_ACCOUNT, + ONLINE(TestsConstants.TEST_INDEX_ONLINE, "online", null, "src/test/resources/online.json"), + ACCOUNT( + TestsConstants.TEST_INDEX_ACCOUNT, "account", getAccountIndexMapping(), "src/test/resources/accounts.json"), - PHRASE(TestsConstants.TEST_INDEX_PHRASE, + PHRASE( + TestsConstants.TEST_INDEX_PHRASE, "phrase", getPhraseIndexMapping(), "src/test/resources/phrases.json"), - DOG(TestsConstants.TEST_INDEX_DOG, - "dog", - getDogIndexMapping(), - "src/test/resources/dogs.json"), - DOGS2(TestsConstants.TEST_INDEX_DOG2, + DOG(TestsConstants.TEST_INDEX_DOG, "dog", getDogIndexMapping(), "src/test/resources/dogs.json"), + DOGS2( + TestsConstants.TEST_INDEX_DOG2, "dog", getDogs2IndexMapping(), "src/test/resources/dogs2.json"), - DOGS3(TestsConstants.TEST_INDEX_DOG3, + DOGS3( + TestsConstants.TEST_INDEX_DOG3, "dog", getDogs3IndexMapping(), "src/test/resources/dogs3.json"), - DOGSSUBQUERY(TestsConstants.TEST_INDEX_DOGSUBQUERY, + DOGSSUBQUERY( + TestsConstants.TEST_INDEX_DOGSUBQUERY, "dog", getDogIndexMapping(), "src/test/resources/dogsubquery.json"), - PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, - "people", - null, - "src/test/resources/peoples.json"), - PEOPLE2(TestsConstants.TEST_INDEX_PEOPLE2, + PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, "people", null, "src/test/resources/peoples.json"), + PEOPLE2( + TestsConstants.TEST_INDEX_PEOPLE2, "people", getPeople2IndexMapping(), "src/test/resources/people2.json"), - GAME_OF_THRONES(TestsConstants.TEST_INDEX_GAME_OF_THRONES, + GAME_OF_THRONES( + TestsConstants.TEST_INDEX_GAME_OF_THRONES, "gotCharacters", getGameOfThronesIndexMapping(), "src/test/resources/game_of_thrones_complex.json"), - SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, - "systems", - null, - "src/test/resources/systems.json"), - ODBC(TestsConstants.TEST_INDEX_ODBC, + SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, "systems", null, "src/test/resources/systems.json"), + ODBC( + TestsConstants.TEST_INDEX_ODBC, "odbc", getOdbcIndexMapping(), "src/test/resources/odbc-date-formats.json"), - LOCATION(TestsConstants.TEST_INDEX_LOCATION, + LOCATION( + TestsConstants.TEST_INDEX_LOCATION, "location", getLocationIndexMapping(), "src/test/resources/locations.json"), - LOCATION_TWO(TestsConstants.TEST_INDEX_LOCATION2, + LOCATION_TWO( + TestsConstants.TEST_INDEX_LOCATION2, "location2", getLocationIndexMapping(), "src/test/resources/locations2.json"), - NESTED(TestsConstants.TEST_INDEX_NESTED_TYPE, + NESTED( + TestsConstants.TEST_INDEX_NESTED_TYPE, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects.json"), - NESTED_WITH_QUOTES(TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, + NESTED_WITH_QUOTES( + TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects_quotes_in_values.json"), - EMPLOYEE_NESTED(TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, + EMPLOYEE_NESTED( + TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, "_doc", getEmployeeNestedTypeIndexMapping(), "src/test/resources/employee_nested.json"), - JOIN(TestsConstants.TEST_INDEX_JOIN_TYPE, + JOIN( + TestsConstants.TEST_INDEX_JOIN_TYPE, "joinType", getJoinTypeIndexMapping(), "src/test/resources/join_objects.json"), - BANK(TestsConstants.TEST_INDEX_BANK, + BANK( + TestsConstants.TEST_INDEX_BANK, "account", getBankIndexMapping(), "src/test/resources/bank.json"), - BANK_TWO(TestsConstants.TEST_INDEX_BANK_TWO, + BANK_TWO( + 
TestsConstants.TEST_INDEX_BANK_TWO, "account_two", getBankIndexMapping(), "src/test/resources/bank_two.json"), - BANK_WITH_NULL_VALUES(TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + BANK_WITH_NULL_VALUES( + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "account_null", getBankWithNullValuesIndexMapping(), "src/test/resources/bank_with_null_values.json"), - ORDER(TestsConstants.TEST_INDEX_ORDER, + ORDER( + TestsConstants.TEST_INDEX_ORDER, "_doc", getOrderIndexMapping(), "src/test/resources/order.json"), - WEBLOG(TestsConstants.TEST_INDEX_WEBLOG, + WEBLOG( + TestsConstants.TEST_INDEX_WEBLOG, "weblog", getWeblogsIndexMapping(), "src/test/resources/weblogs.json"), - DATE(TestsConstants.TEST_INDEX_DATE, + DATE( + TestsConstants.TEST_INDEX_DATE, "dates", getDateIndexMapping(), "src/test/resources/dates.json"); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java index c1c1a26f4a..356b910d5f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.allOf; @@ -45,10 +44,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; - -/** - * Created by allwefantasy on 8/25/16. - */ +/** Created by allwefantasy on 8/25/16. */ public class SQLFunctionsIT extends SQLIntegTestCase { @Override @@ -61,70 +57,75 @@ protected void init() throws Exception { @Test public void functionFieldAliasAndGroupByAlias() throws Exception { - String query = "SELECT " + - "floor(substring(address,0,3)*20) as key," + - "sum(age) cvalue FROM " + TEST_INDEX_ACCOUNT + " where address is not null " + - "group by key order by cvalue desc limit 10 "; + String query = + "SELECT " + + "floor(substring(address,0,3)*20) as key," + + "sum(age) cvalue FROM " + + TEST_INDEX_ACCOUNT + + " where address is not null " + + "group by key order by cvalue desc limit 10 "; final JSONObject result = executeQuery(query); - - IntStream.rangeClosed(0, 9).forEach(i -> { - Assert.assertNotNull(result.query(String.format("/aggregations/key/buckets/%d/key", i))); - Assert.assertNotNull( - result.query(String.format("/aggregations/key/buckets/%d/cvalue/value", i))); - } - ); + IntStream.rangeClosed(0, 9) + .forEach( + i -> { + Assert.assertNotNull( + result.query(String.format("/aggregations/key/buckets/%d/key", i))); + Assert.assertNotNull( + result.query(String.format("/aggregations/key/buckets/%d/cvalue/value", i))); + }); } /** * todo fix the issue. 
* - * @see https://github.com/opendistro-for-elasticsearch/sql/issues/59 + * @see https://github.com/opendistro-for-elasticsearch/sql/issues/59 */ @Ignore public void normalFieldAlias() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - "address as key,age from " + - TEST_INDEX_ACCOUNT + " where address is not null " + - "limit 10 "; + // here is a bug,csv field with spa + String query = + "SELECT " + + "address as key,age from " + + TEST_INDEX_ACCOUNT + + " where address is not null " + + "limit 10 "; - assertThat( - executeQuery(query), - hitAny(kvString("/_source/key", not(isEmptyOrNullString()))) - ); + assertThat(executeQuery(query), hitAny(kvString("/_source/key", not(isEmptyOrNullString())))); } @Test public void functionAlias() throws Exception { - //here is a bug,if only script fields are included,then all fields will return; fix later - String query = "SELECT " + - "substring(address,0,3) as key,address from " + - TEST_INDEX_ACCOUNT + " where address is not null " + - "order by address desc limit 10 "; + // here is a bug,if only script fields are included,then all fields will return; fix later + String query = + "SELECT " + + "substring(address,0,3) as key,address from " + + TEST_INDEX_ACCOUNT + + " where address is not null " + + "order by address desc limit 10 "; assertThat( executeQuery(query), - hitAny(both(kvString("/_source/address", equalTo("863 Wythe Place"))) - .and(kvString("/fields/key/0", - equalTo("863")))) - ); + hitAny( + both(kvString("/_source/address", equalTo("863 Wythe Place"))) + .and(kvString("/fields/key/0", equalTo("863"))))); } @Test public void caseChangeTest() throws IOException { - String query = "SELECT LOWER(firstname) " + - "FROM opensearch-sql_test_index_account " + - "WHERE UPPER(lastname)='DUKE' " + - "ORDER BY upper(lastname) "; + String query = + "SELECT LOWER(firstname) " + + "FROM opensearch-sql_test_index_account " + + "WHERE UPPER(lastname)='DUKE' " + + "ORDER BY upper(lastname) "; assertThat( executeQuery(query), hitAny( kvString("/_source/address", equalTo("880 Holmes Lane")), - kvString("/fields/LOWER(firstname)/0", equalTo("amber"))) - ); + kvString("/fields/LOWER(firstname)/0", equalTo("amber")))); } @Test @@ -133,23 +134,23 @@ public void caseChangeTestWithLocale() throws IOException { // "IL".toLowerCase() in a Turkish locale returns "ıl" // https://stackoverflow.com/questions/11063102/using-locales-with-javas-tolowercase-and-touppercase - String query = "SELECT LOWER(state.keyword, 'tr') " + - "FROM opensearch-sql_test_index_account " + - "WHERE account_number=1"; + String query = + "SELECT LOWER(state.keyword, 'tr') " + + "FROM opensearch-sql_test_index_account " + + "WHERE account_number=1"; assertThat( executeQuery(query), - hitAny( - kvString("/fields/LOWER(state.keyword, 'tr')/0", equalTo("ıl"))) - ); + hitAny(kvString("/fields/LOWER(state.keyword, 'tr')/0", equalTo("ıl")))); } @Test public void caseChangeWithAggregationTest() throws IOException { - String query = "SELECT UPPER(e.firstname) AS upper, COUNT(*)" + - "FROM opensearch-sql_test_index_account e " + - "WHERE LOWER(e.lastname)='duke' " + - "GROUP BY upper"; + String query = + "SELECT UPPER(e.firstname) AS upper, COUNT(*)" + + "FROM opensearch-sql_test_index_account e " + + "WHERE LOWER(e.lastname)='duke' " + + "GROUP BY upper"; assertThat( executeQuery(query), @@ -158,8 +159,10 @@ public void caseChangeWithAggregationTest() throws IOException { @Test public void castIntFieldToDoubleWithoutAliasTest() throws IOException { - String 
query = "SELECT CAST(age AS DOUBLE) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age DESC LIMIT 5"; + String query = + "SELECT CAST(age AS DOUBLE) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY age DESC LIMIT 5"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_age", "DOUBLE"); @@ -171,8 +174,9 @@ public void castIntFieldToDoubleWithoutAliasTest() throws IOException { @Test public void castIntFieldToDoubleWithAliasTest() throws IOException { String query = - "SELECT CAST(age AS DOUBLE) AS test_alias FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age LIMIT 5"; + "SELECT CAST(age AS DOUBLE) AS test_alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY age LIMIT 5"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "test_alias", "DOUBLE"); @@ -183,8 +187,10 @@ public void castIntFieldToDoubleWithAliasTest() throws IOException { @Test public void castIntFieldToStringWithoutAliasTest() throws IOException { - String query = "SELECT CAST(balance AS STRING) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY balance LIMIT 1"; + String query = + "SELECT CAST(balance AS STRING) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY balance LIMIT 1"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_balance", "STRING"); @@ -195,48 +201,51 @@ public void castIntFieldToStringWithoutAliasTest() throws IOException { @Test public void castIntFieldToStringWithAliasTest() throws IOException { - String query = "SELECT CAST(balance AS STRING) AS cast_string_alias FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY cast_string_alias DESC LIMIT 1"; + String query = + "SELECT CAST(balance AS STRING) AS cast_string_alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY cast_string_alias DESC LIMIT 1"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_string_alias", "STRING"); for (int i = 0; i < hits.length; ++i) { Assert.assertThat(hits[i].getFields().get("cast_string_alias").getValue(), is("9838")); } - } @Test public void castIntFieldToFloatWithoutAliasJdbcFormatTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) AS cast_balance FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY balance DESC LIMIT 1"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) AS cast_balance FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY balance DESC LIMIT 1"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", "cast_balance", "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", "cast_balance", "float")); - verifyDataRows(response, - rows(49989.0)); + verifyDataRows(response, rows(49989.0)); } @Test public void castIntFieldToFloatWithAliasJdbcFormatTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " + - "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY jdbc_float_alias LIMIT 1"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " + + "FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY jdbc_float_alias LIMIT 1"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); - verifyDataRows(response, - rows(1011.0)); + verifyDataRows(response, rows(1011.0)); } @Test public void 
castIntFieldToDoubleWithoutAliasOrderByTest() throws IOException { - String query = "SELECT CAST(age AS DOUBLE) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age LIMIT 1"; + String query = + "SELECT CAST(age AS DOUBLE) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY age LIMIT 1"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_age", "DOUBLE"); @@ -247,148 +256,138 @@ public void castIntFieldToDoubleWithoutAliasOrderByTest() throws IOException { @Test public void castIntFieldToDoubleWithAliasOrderByTest() throws IOException { - String query = "SELECT CAST(age AS DOUBLE) AS alias FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY alias DESC LIMIT 1"; + String query = + "SELECT CAST(age AS DOUBLE) AS alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY alias DESC LIMIT 1"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "alias", "DOUBLE"); for (int i = 0; i < hits.length; ++i) { Assert.assertThat(hits[i].getFields().get("alias").getValue(), is(40.0)); } - } @Test public void castIntFieldToFloatWithoutAliasJdbcFormatGroupByTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " GROUP BY balance ORDER BY balance DESC LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY balance ORDER BY balance DESC LIMIT 5"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", null, "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", null, "float")); - verifyDataRows(response, - rows(49989.0), - rows(49795.0), - rows(49741.0), - rows(49671.0), - rows(49587.0)); + verifyDataRows( + response, rows(49989.0), rows(49795.0), rows(49741.0), rows(49671.0), rows(49587.0)); } @Test public void castIntFieldToFloatWithAliasJdbcFormatGroupByTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " - + " FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " GROUP BY jdbc_float_alias " - + " ORDER BY jdbc_float_alias ASC " - + " LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " + + " FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY jdbc_float_alias " + + " ORDER BY jdbc_float_alias ASC " + + " LIMIT 5"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); - verifyDataRows(response, - rows(1011.0), - rows(10116.0), - rows(10138.0), - rows(10147.0), - rows(10178.0)); + verifyDataRows( + response, rows(1011.0), rows(10116.0), rows(10138.0), rows(10147.0), rows(10178.0)); } @Test public void castIntFieldToDoubleWithAliasJdbcFormatGroupByTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(age AS DOUBLE) AS jdbc_double_alias " + - "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " GROUP BY jdbc_double_alias DESC LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(age AS DOUBLE) AS jdbc_double_alias " + + "FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY jdbc_double_alias DESC LIMIT 5"); - verifySchema(response, - schema("jdbc_double_alias", "jdbc_double_alias", "double")); + verifySchema(response, schema("jdbc_double_alias", "jdbc_double_alias", "double")); - verifyDataRows(response, - rows("31.0"), - rows("39.0"), - rows("26.0"), - rows("32.0"), 
- rows("35.0")); + verifyDataRows(response, rows("31.0"), rows("39.0"), rows("26.0"), rows("32.0"), rows("35.0")); } @Test public void castKeywordFieldToDatetimeWithoutAliasJdbcFormatTest() { - JSONObject response = executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) FROM " - + TestsConstants.TEST_INDEX_DATE + " ORDER BY date_keyword"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) FROM " + + TestsConstants.TEST_INDEX_DATE + + " ORDER BY date_keyword"); verifySchema(response, schema("cast_date_keyword", null, "date")); - verifyDataRows(response, - rows("2014-08-19 07:09:13.434"), - rows("2019-09-25 02:04:13.469")); + verifyDataRows(response, rows("2014-08-19 07:09:13.434"), rows("2019-09-25 02:04:13.469")); } @Test public void castKeywordFieldToDatetimeWithAliasJdbcFormatTest() { JSONObject response = - executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " - + TestsConstants.TEST_INDEX_DATE + " ORDER BY date_keyword"); + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " + + TestsConstants.TEST_INDEX_DATE + + " ORDER BY date_keyword"); verifySchema(response, schema("test_alias", null, "date")); - verifyDataRows(response, - rows("2014-08-19 07:09:13.434"), - rows("2019-09-25 02:04:13.469")); + verifyDataRows(response, rows("2014-08-19 07:09:13.434"), rows("2019-09-25 02:04:13.469")); } @Test public void castFieldToDatetimeWithWhereClauseJdbcFormatTest() { - JSONObject response = executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) FROM " - + TestsConstants.TEST_INDEX_DATE + " WHERE date_keyword IS NOT NULL ORDER BY date_keyword"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) FROM " + + TestsConstants.TEST_INDEX_DATE + + " WHERE date_keyword IS NOT NULL ORDER BY date_keyword"); verifySchema(response, schema("cast_date_keyword", null, "date")); - verifyDataRows(response, - rows("2014-08-19 07:09:13.434"), - rows("2019-09-25 02:04:13.469")); + verifyDataRows(response, rows("2014-08-19 07:09:13.434"), rows("2019-09-25 02:04:13.469")); } @Test public void castFieldToDatetimeWithGroupByJdbcFormatTest() { JSONObject response = - executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " - + TestsConstants.TEST_INDEX_DATE + " GROUP BY test_alias DESC"); + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " + + TestsConstants.TEST_INDEX_DATE + + " GROUP BY test_alias DESC"); verifySchema(response, schema("test_alias", "test_alias", "double")); - verifyDataRows(response, - rows("2014-08-19T07:09:13.434Z"), - rows("2019-09-25T02:04:13.469Z")); + verifyDataRows(response, rows("2014-08-19T07:09:13.434Z"), rows("2019-09-25T02:04:13.469Z")); } - @Test public void castBoolFieldToNumericValueInSelectClause() { JSONObject response = executeJdbcRequest( "SELECT " - + " male, " - + " CAST(male AS INT) AS cast_int, " - + " CAST(male AS LONG) AS cast_long, " - + " CAST(male AS FLOAT) AS cast_float, " - + " CAST(male AS DOUBLE) AS cast_double " - + "FROM " + TestsConstants.TEST_INDEX_BANK + " " - + "WHERE account_number = 1 OR account_number = 13" - ); - - verifySchema(response, + + " male, " + + " CAST(male AS INT) AS cast_int, " + + " CAST(male AS LONG) AS cast_long, " + + " CAST(male AS FLOAT) AS cast_float, " + + " CAST(male AS DOUBLE) AS cast_double " + + "FROM " + + TestsConstants.TEST_INDEX_BANK + + " " + + "WHERE account_number = 1 OR account_number = 13"); + + verifySchema( + response, schema("male", 
"boolean"), schema("CAST(male AS INT)", "cast_int", "integer"), schema("CAST(male AS LONG)", "cast_long", "long"), schema("CAST(male AS FLOAT)", "cast_float", "float"), - schema("CAST(male AS DOUBLE)", "cast_double", "double") - ); - verifyDataRows(response, - rows(true, 1, 1, 1.0, 1.0), - rows(false, 0, 0, 0.0, 0.0) - ); + schema("CAST(male AS DOUBLE)", "cast_double", "double")); + verifyDataRows(response, rows(true, 1, 1, 1.0, 1.0), rows(false, 0, 0, 0.0, 0.0)); } @Test @@ -396,90 +395,82 @@ public void castBoolFieldToNumericValueWithGroupByAlias() { JSONObject response = executeJdbcRequest( "SELECT " - + "CAST(male AS INT) AS cast_int, " - + "COUNT(*) " - + "FROM " + TestsConstants.TEST_INDEX_BANK + " " - + "GROUP BY cast_int" - ); - - verifySchema(response, + + "CAST(male AS INT) AS cast_int, " + + "COUNT(*) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK + + " " + + "GROUP BY cast_int"); + + verifySchema( + response, schema("CAST(male AS INT)", "cast_int", "integer"), - schema("COUNT(*)", "integer") - ); - verifyDataRows(response, - rows(0, 3), - rows(1, 4) - ); + schema("COUNT(*)", "integer")); + verifyDataRows(response, rows(0, 3), rows(1, 4)); } @Test public void castStatementInWhereClauseGreaterThanTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (account_number < CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (account_number < CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(4180), - rows(5686), - rows(7004), - rows(7831), - rows(14127)); + verifyDataRows(response, rows(4180), rows(5686), rows(7004), rows(7831), rows(14127)); } @Test public void castStatementInWhereClauseLessThanTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (account_number > CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (account_number > CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(1011), - rows(1031), - rows(1110), - rows(1133), - rows(1172)); + verifyDataRows(response, rows(1011), rows(1031), rows(1110), rows(1133), rows(1172)); } @Test public void castStatementInWhereClauseEqualToConstantTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (CAST(age AS DOUBLE) = 36.0) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (CAST(age AS DOUBLE) = 36.0) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(1249), - rows(1463), - rows(3960), - rows(5686), - rows(6025)); + verifyDataRows(response, rows(1249), rows(1463), rows(3960), rows(5686), rows(6025)); } @Test public void castStatementInWhereClauseLessThanConstantTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (CAST(age AS DOUBLE) < 36.0) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (CAST(age AS DOUBLE) < 36.0) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - 
verifyDataRows(response, - rows(1011), - rows(1031), - rows(1110), - rows(1133), - rows(1172)); + verifyDataRows(response, rows(1011), rows(1031), rows(1110), rows(1133), rows(1172)); } /** - * Testing compilation - * Result comparison is empty then comparing different types (Date and keyword) + * Testing compilation Result comparison is empty then comparing different types (Date and + * keyword) */ @Test public void castStatementInWhereClauseDatetimeCastTest() { - JSONObject response = executeJdbcRequest("SELECT date_keyword FROM " - + TestsConstants.TEST_INDEX_DATE - + " WHERE (CAST(date_keyword AS DATETIME) = '2014-08-19T07:09:13.434Z')"); + JSONObject response = + executeJdbcRequest( + "SELECT date_keyword FROM " + + TestsConstants.TEST_INDEX_DATE + + " WHERE (CAST(date_keyword AS DATETIME) = '2014-08-19T07:09:13.434Z')"); String schema_result = "{\"name\":\"date_keyword\",\"type\":\"keyword\"}"; assertEquals(response.getJSONArray("schema").get(0).toString(), schema_result); @@ -487,30 +478,32 @@ public void castStatementInWhereClauseDatetimeCastTest() { @Test public void concat_ws_field_and_string() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - " concat_ws('-',age,'-') as age,address from " + - TEST_INDEX_ACCOUNT + " " + - " limit 10 "; + // here is a bug,csv field with spa + String query = + "SELECT " + + " concat_ws('-',age,'-') as age,address from " + + TEST_INDEX_ACCOUNT + + " " + + " limit 10 "; - assertThat( - executeQuery(query), - hitAny(kvString("/fields/age/0", endsWith("--"))) - ); + assertThat(executeQuery(query), hitAny(kvString("/fields/age/0", endsWith("--")))); } /** * Ignore this test case because painless doesn't allowlist String.split function. * - * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html + * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html */ @Ignore public void whereConditionLeftFunctionRightVariableEqualTest() throws Exception { - String query = "SELECT " + - " * from " + - TestsConstants.TEST_INDEX + " " + - " where split(address,' ')[0]='806' limit 1000 "; + String query = + "SELECT " + + " * from " + + TestsConstants.TEST_INDEX + + " " + + " where split(address,' ')[0]='806' limit 1000 "; assertThat(executeQuery(query).query("/hits/total"), equalTo(4)); } @@ -518,15 +511,18 @@ public void whereConditionLeftFunctionRightVariableEqualTest() throws Exception /** * Ignore this test case because painless doesn't allowlist String.split function. 
* - * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html + * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html */ @Ignore public void whereConditionLeftFunctionRightVariableGreatTest() throws Exception { - String query = "SELECT " + - " * from " + - TestsConstants.TEST_INDEX + " " + - " where floor(split(address,' ')[0]+0) > 805 limit 1000 "; + String query = + "SELECT " + + " * from " + + TestsConstants.TEST_INDEX + + " " + + " where floor(split(address,' ')[0]+0) > 805 limit 1000 "; assertThat(executeQuery(query).query("/hits/total"), equalTo(223)); } @@ -534,42 +530,45 @@ public void whereConditionLeftFunctionRightVariableGreatTest() throws Exception @Test public void concat_ws_fields() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - " concat_ws('-',age,address) as combine,address from " + - TEST_INDEX_ACCOUNT + " " + - " limit 10 "; - assertThat( - executeQuery(query), - hitAny(kvString("/fields/combine/0", containsString("-"))) - ); + // here is a bug,csv field with spa + String query = + "SELECT " + + " concat_ws('-',age,address) as combine,address from " + + TEST_INDEX_ACCOUNT + + " " + + " limit 10 "; + assertThat(executeQuery(query), hitAny(kvString("/fields/combine/0", containsString("-")))); } @Test public void functionLogs() throws Exception { - String query = "SELECT log10(100) as a, log(1) as b, log(2, 4) as c, log2(8) as d from " - + TEST_INDEX_ACCOUNT + " limit 1"; + String query = + "SELECT log10(100) as a, log(1) as b, log(2, 4) as c, log2(8) as d from " + + TEST_INDEX_ACCOUNT + + " limit 1"; assertThat( executeQuery(query), - hitAny(both(kvDouble("/fields/a/0", equalTo(Math.log10(100)))) - .and(kvDouble("/fields/b/0", equalTo(Math.log(1)))) - .and(kvDouble("/fields/c/0", closeTo(Math.log(4) / Math.log(2), 0.0001))) - .and(kvDouble("/fields/d/0", closeTo(Math.log(8) / Math.log(2), 0.0001)))) - ); + hitAny( + both(kvDouble("/fields/a/0", equalTo(Math.log10(100)))) + .and(kvDouble("/fields/b/0", equalTo(Math.log(1)))) + .and(kvDouble("/fields/c/0", closeTo(Math.log(4) / Math.log(2), 0.0001))) + .and(kvDouble("/fields/d/0", closeTo(Math.log(8) / Math.log(2), 0.0001))))); } @Test public void functionPow() throws Exception { - String query = "SELECT pow(account_number, 2) as key," + - "abs(age - 60) as new_age from " + TEST_INDEX_ACCOUNT + - " WHERE firstname = 'Virginia' and lastname='Ayala' limit 1"; + String query = + "SELECT pow(account_number, 2) as key," + + "abs(age - 60) as new_age from " + + TEST_INDEX_ACCOUNT + + " WHERE firstname = 'Virginia' and lastname='Ayala' limit 1"; assertThat( executeQuery(query), - hitAny(both(kvDouble("/fields/new_age/0", equalTo(21.0))) - .and(kvDouble("/fields/key/0", equalTo(625.0)))) - ); + hitAny( + both(kvDouble("/fields/new_age/0", equalTo(21.0))) + .and(kvDouble("/fields/key/0", equalTo(625.0))))); } @Test @@ -577,96 +576,88 @@ public void operatorSubstring() throws IOException { assertThat( executeQuery( "SELECT substring('sampleName', 1, 4) AS substring FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/substring/0", equalTo("samp"))) - ); + hitAny(kvString("/fields/substring/0", equalTo("samp")))); assertThat( executeQuery( "SELECT substring('sampleName', 0, 20) AS substring FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/substring/0", equalTo("sampleName"))) - ); + hitAny(kvString("/fields/substring/0", equalTo("sampleName")))); } @Test public void operatorLength() throws IOException { 
assertThat( - executeQuery("SELECT LENGTH(lastname) FROM " + TEST_INDEX_ACCOUNT + executeQuery( + "SELECT LENGTH(lastname) FROM " + + TEST_INDEX_ACCOUNT + " WHERE lastname IS NOT NULL GROUP BY LENGTH(lastname) ORDER BY LENGTH(lastname)", "jdbc"), - containsString("\"type\": \"integer\"") - ); + containsString("\"type\": \"integer\"")); assertThat( executeQuery("SELECT LENGTH('sampleName') AS length FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/length/0", equalTo(10))) - ); - + hitAny(kvInt("/fields/length/0", equalTo(10)))); } @Test public void operatorReplace() { String query = "SELECT REPLACE('elastic', 'el', 'fant') FROM " + TEST_INDEX_ACCOUNT; - assertThat( - executeQuery(query, "jdbc"), - containsString("fantastic") - ); + assertThat(executeQuery(query, "jdbc"), containsString("fantastic")); } - @Ignore("The LOCATE function is not implemented in new SQL engine. https://github" - + ".com/opensearch-project/sql/issues/74") + @Ignore( + "The LOCATE function is not implemented in new SQL engine. https://github" + + ".com/opensearch-project/sql/issues/74") public void operatorLocate() throws IOException { - String query = "SELECT LOCATE('a', lastname, 0) FROM " + TEST_INDEX_ACCOUNT - + - " WHERE lastname IS NOT NULL GROUP BY LOCATE('a', lastname, 0) ORDER BY LOCATE('a', lastname, 0)"; - assertThat( - executeQuery(query, "jdbc"), containsString("\"type\": \"integer\"") - ); + String query = + "SELECT LOCATE('a', lastname, 0) FROM " + + TEST_INDEX_ACCOUNT + + " WHERE lastname IS NOT NULL GROUP BY LOCATE('a', lastname, 0) ORDER BY LOCATE('a'," + + " lastname, 0)"; + assertThat(executeQuery(query, "jdbc"), containsString("\"type\": \"integer\"")); assertThat( executeQuery("SELECT LOCATE('a', 'sampleName', 3) AS locate FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/locate/0", equalTo(8))) - ); + hitAny(kvInt("/fields/locate/0", equalTo(8)))); assertThat( executeQuery("SELECT LOCATE('a', 'sampleName') AS locate FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/locate/0", equalTo(2))) - ); + hitAny(kvInt("/fields/locate/0", equalTo(2)))); } @Test public void rtrim() throws IOException { assertThat( executeQuery("SELECT RTRIM(' sampleName ') AS rtrim FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/rtrim/0", equalTo(" sampleName"))) - ); + hitAny(kvString("/fields/rtrim/0", equalTo(" sampleName")))); } @Test public void ltrim() throws IOException { assertThat( executeQuery("SELECT LTRIM(' sampleName ') AS ltrim FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ltrim/0", equalTo("sampleName "))) - ); + hitAny(kvString("/fields/ltrim/0", equalTo("sampleName ")))); } - @Ignore("The ASCII function is not implemented in new SQL engine. https://github" - + ".com/opensearch-project/sql/issues/73") + @Ignore( + "The ASCII function is not implemented in new SQL engine. 
https://github" + + ".com/opensearch-project/sql/issues/73") public void ascii() throws IOException { assertThat( - executeQuery("SELECT ASCII(lastname) FROM " + TEST_INDEX_ACCOUNT - + - " WHERE lastname IS NOT NULL GROUP BY ASCII(lastname) ORDER BY ASCII(lastname) LIMIT 5", + executeQuery( + "SELECT ASCII(lastname) FROM " + + TEST_INDEX_ACCOUNT + + " WHERE lastname IS NOT NULL GROUP BY ASCII(lastname) ORDER BY ASCII(lastname)" + + " LIMIT 5", "jdbc"), - containsString("\"type\": \"integer\"") - ); + containsString("\"type\": \"integer\"")); assertThat( executeQuery("SELECT ASCII('sampleName') AS ascii FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ascii/0", equalTo(115))) - ); + hitAny(kvInt("/fields/ascii/0", equalTo(115)))); } /** - * The following tests for LEFT and RIGHT are ignored because the OpenSearch client fails to parse "LEFT"/"RIGHT" in - * the integTest + * The following tests for LEFT and RIGHT are ignored because the OpenSearch client fails to parse + * "LEFT"/"RIGHT" in the integTest */ @Ignore @Test @@ -674,13 +665,11 @@ public void left() throws IOException { assertThat( executeQuery( "SELECT LEFT('sample', 2) AS left FROM " + TEST_INDEX_ACCOUNT + " ORDER BY left"), - hitAny(kvString("/fields/left/0", equalTo("sa"))) - ); + hitAny(kvString("/fields/left/0", equalTo("sa")))); assertThat( executeQuery( "SELECT LEFT('sample', 20) AS left FROM " + TEST_INDEX_ACCOUNT + " ORDER BY left"), - hitAny(kvString("/fields/left/0", equalTo("sample"))) - ); + hitAny(kvString("/fields/left/0", equalTo("sample")))); } @Ignore @@ -689,20 +678,20 @@ public void right() throws IOException { assertThat( executeQuery( "SELECT RIGHT('elastic', 3) AS right FROM " + TEST_INDEX_ACCOUNT + " ORDER BY right"), - hitAny(kvString("/fields/right/0", equalTo("tic"))) - ); + hitAny(kvString("/fields/right/0", equalTo("tic")))); assertThat( executeQuery( "SELECT RIGHT('elastic', 20) AS right FROM " + TEST_INDEX_ACCOUNT + " ORDER BY right"), - hitAny(kvString("/fields/right/0", equalTo("elastic"))) - ); + hitAny(kvString("/fields/right/0", equalTo("elastic")))); } @Test public void ifFuncShouldPassJDBC() { - JSONObject response = executeJdbcRequest( - "SELECT IF(age > 30, 'True', 'False') AS Ages FROM " + TEST_INDEX_ACCOUNT - + " WHERE age IS NOT NULL GROUP BY Ages"); + JSONObject response = + executeJdbcRequest( + "SELECT IF(age > 30, 'True', 'False') AS Ages FROM " + + TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY Ages"); assertEquals("IF(age > 30, 'True', 'False')", response.query("/schema/0/name")); assertEquals("Ages", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -712,35 +701,33 @@ public void ifFuncShouldPassJDBC() { public void ifFuncWithBinaryComparisonAsConditionTest() throws IOException { assertThat( executeQuery("SELECT IF(2 > 0, 'hello', 'world') AS ifTrue FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifTrue/0", equalTo("hello"))) - ); + hitAny(kvString("/fields/ifTrue/0", equalTo("hello")))); assertThat( executeQuery("SELECT IF(2 = 0, 'hello', 'world') AS ifFalse FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifFalse/0", equalTo("world"))) - ); + hitAny(kvString("/fields/ifFalse/0", equalTo("world")))); } @Test public void ifFuncWithBooleanExprInputAsConditionTest() throws IOException { assertThat( executeQuery("SELECT IF(true, 1, 0) AS ifBoolean FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ifBoolean/0", equalTo(1))) - ); + hitAny(kvInt("/fields/ifBoolean/0", equalTo(1)))); } @Test public void 
ifFuncWithNullInputAsConditionTest() throws IOException { assertThat( executeQuery("SELECT IF(null, 1, 0) AS ifNull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ifNull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/ifNull/0", equalTo(0)))); } @Test public void ifnullShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT IFNULL(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT - + " GROUP BY name"); + JSONObject response = + executeJdbcRequest( + "SELECT IFNULL(lastname, 'unknown') AS name FROM " + + TEST_INDEX_ACCOUNT + + " GROUP BY name"); assertEquals("IFNULL(lastname, 'unknown')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -750,27 +737,23 @@ public void ifnullShouldPassJDBC() throws IOException { public void ifnullWithNotNullInputTest() throws IOException { assertThat( executeQuery("SELECT IFNULL('sample', 'IsNull') AS ifnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifnull/0", equalTo("sample"))) - ); + hitAny(kvString("/fields/ifnull/0", equalTo("sample")))); } @Test public void ifnullWithNullInputTest() throws IOException { assertThat( executeQuery("SELECT IFNULL(null, 10) AS ifnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ifnull/0", equalTo(10))) - ); + hitAny(kvInt("/fields/ifnull/0", equalTo(10)))); assertThat( executeQuery("SELECT IFNULL('', 10) AS ifnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifnull/0", equalTo(""))) - ); + hitAny(kvString("/fields/ifnull/0", equalTo("")))); } @Test public void isnullShouldPassJDBC() { JSONObject response = - executeJdbcRequest( - "SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); + executeJdbcRequest("SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("ISNULL(lastname)", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("boolean", response.query("/schema/0/type")); @@ -780,61 +763,57 @@ public void isnullShouldPassJDBC() { public void isnullWithNotNullInputTest() throws IOException { assertThat( executeQuery("SELECT ISNULL('elastic') AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( executeQuery("SELECT ISNULL('') AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); } @Test public void isnullWithNullInputTest() throws IOException { assertThat( executeQuery("SELECT ISNULL(null) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(1))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(1)))); } @Test public void isnullWithMathExpr() throws IOException { assertThat( executeQuery("SELECT ISNULL(1+1) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( executeQuery("SELECT ISNULL(1+1*1/0) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(1))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(1)))); } /** * Ignore this test case because painless doesn't allowlist String.split function. 
* - * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html + * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html */ @Ignore public void split_field() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - " split(address,' ')[0],age from " + - TestsConstants.TEST_INDEX + " where address is not null " + - " limit 10 "; + // here is a bug,csv field with spa + String query = + "SELECT " + + " split(address,' ')[0],age from " + + TestsConstants.TEST_INDEX + + " where address is not null " + + " limit 10 "; } @Test public void literal() throws Exception { - String query = "SELECT 10 " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query = "SELECT 10 from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits[0].getFields(), hasValue(contains(10))); } @Test public void literalWithDoubleValue() throws Exception { - String query = "SELECT 10.0 " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query = "SELECT 10.0 from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits[0].getFields(), hasValue(contains(10.0))); @@ -842,8 +821,7 @@ public void literalWithDoubleValue() throws Exception { @Test public void literalWithAlias() throws Exception { - String query = "SELECT 10 as key " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query = "SELECT 10 as key from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits.length, is(1)); @@ -852,8 +830,7 @@ public void literalWithAlias() throws Exception { @Test public void literalMultiField() throws Exception { - String query = "SELECT 1, 2 " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query = "SELECT 1, 2 from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits.length, is(1)); @@ -863,10 +840,11 @@ public void literalMultiField() throws Exception { private SearchHits query(String query) throws IOException { final String rsp = executeQueryWithStringOutput(query); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(rsp)); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(rsp)); return SearchResponse.fromXContent(parser).getHits(); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java index 58e55c4101..8335ada5a7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static com.google.common.base.Strings.isNullOrEmpty; @@ -68,9 +67,7 @@ import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.datasource.model.DataSourceMetadata; -/** - * OpenSearch Rest integration test base for SQL testing - */ +/** OpenSearch Rest integration test base for SQL testing */ public abstract class SQLIntegTestCase extends OpenSearchSQLRestTestCase { public static final String PERSISTENT = "persistent"; @@ -102,12 +99,16 @@ protected boolean preserveClusterUponCompletion() { } /** + * + * + *
    * We need to be able to dump the jacoco coverage before the cluster is shut down.
    * The new internal testing framework removed some of the gradle tasks we were listening to
    * in order to choose a good time to do it. This will dump the executionData to a file after each test.
    * TODO: This is also currently just overwriting integTest.exec with the updated execData without
    * resetting it after each write. This can be improved either by writing an exec file per test
    * or by letting jacoco append to the file.
+   * 
*/ public interface IProxy { byte[] getExecutionData(boolean reset); @@ -128,10 +129,12 @@ public static void dumpCoverage() { String serverUrl = "service:jmx:rmi:///jndi/rmi://127.0.0.1:7777/jmxrmi"; try (JMXConnector connector = JMXConnectorFactory.connect(new JMXServiceURL(serverUrl))) { - IProxy proxy = MBeanServerInvocationHandler.newProxyInstance( - connector.getMBeanServerConnection(), new ObjectName("org.jacoco:type=Runtime"), - IProxy.class, - false); + IProxy proxy = + MBeanServerInvocationHandler.newProxyInstance( + connector.getMBeanServerConnection(), + new ObjectName("org.jacoco:type=Runtime"), + IProxy.class, + false); Path path = Paths.get(jacocoBuildPath + "/integTest.exec"); Files.write(path, proxy.getExecutionData(false)); @@ -141,9 +144,10 @@ public static void dumpCoverage() { } /** - * As JUnit JavaDoc says: - * "The @AfterClass methods declared in superclasses will be run after those of the current class." - * So this method is supposed to run before closeClients() in parent class. + * As JUnit JavaDoc says:
+ * "The @AfterClass methods declared in superclasses will be run after those of the current + * class."
+ * So this method is supposed to run before closeClients() in parent class. class. */ @AfterClass public static void cleanUpIndices() throws IOException { @@ -155,13 +159,16 @@ public static void cleanUpIndices() throws IOException { protected void setQuerySizeLimit(Integer limit) throws IOException { updateClusterSettings( - new ClusterSetting("transient", Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), limit.toString())); + new ClusterSetting( + "transient", Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), limit.toString())); } protected void resetQuerySizeLimit() throws IOException { updateClusterSettings( - new ClusterSetting("transient", Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), DEFAULT_QUERY_SIZE_LIMIT - .toString())); + new ClusterSetting( + "transient", + Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), + DEFAULT_QUERY_SIZE_LIMIT.toString())); } protected static void wipeAllClusterSettings() throws IOException { @@ -178,19 +185,16 @@ protected void setMaxResultWindow(String indexName, Integer window) throws IOExc } protected void resetMaxResultWindow(String indexName) throws IOException { - updateIndexSettings(indexName, - "{ \"index\": { \"max_result_window\": " + DEFAULT_MAX_RESULT_WINDOW + " } }"); + updateIndexSettings( + indexName, "{ \"index\": { \"max_result_window\": " + DEFAULT_MAX_RESULT_WINDOW + " } }"); } - /** - * Provide for each test to load test index, data and other setup work - */ - protected void init() throws Exception { - } + /** Provide for each test to load test index, data and other setup work */ + protected void init() throws Exception {} /** - * Make it thread-safe in case tests are running in parallel but does not guarantee - * if test like DeleteIT that mutates cluster running in parallel. + * Make it thread-safe in case tests are running in parallel but does not guarantee if test like + * DeleteIT that mutates cluster running in parallel. 
*/ protected synchronized void loadIndex(Index index, RestClient client) throws IOException { String indexName = index.getName(); @@ -304,8 +308,9 @@ protected Request buildGetEndpointRequest(final String sqlQuery) { Assert.fail(utf8CharsetName + " not available"); } - final String requestUrl = String.format(Locale.ROOT, "%s?sql=%s&format=%s", QUERY_API_ENDPOINT, - urlEncodedQuery, "json"); + final String requestUrl = + String.format( + Locale.ROOT, "%s?sql=%s&format=%s", QUERY_API_ENDPOINT, urlEncodedQuery, "json"); return new Request("GET", requestUrl); } @@ -344,7 +349,8 @@ private String executeRequest(final String requestBody, final boolean isExplainQ return executeRequest(sqlRequest); } - protected static String executeRequest(final Request request, RestClient client) throws IOException { + protected static String executeRequest(final Request request, RestClient client) + throws IOException { Response response = client.performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); return getResponseBody(response); @@ -373,10 +379,12 @@ protected JSONObject executeCursorCloseQuery(final String cursor) throws IOExcep return new JSONObject(executeRequest(sqlRequest)); } - protected static JSONObject updateClusterSettings(ClusterSetting setting, RestClient client) throws IOException { + protected static JSONObject updateClusterSettings(ClusterSetting setting, RestClient client) + throws IOException { Request request = new Request("PUT", "/_cluster/settings"); - String persistentSetting = String.format(Locale.ROOT, - "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); + String persistentSetting = + String.format( + Locale.ROOT, "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); request.setJsonEntity(persistentSetting); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); restOptionsBuilder.addHeader("Content-Type", "application/json"); @@ -413,11 +421,7 @@ ClusterSetting nullify() { @Override public String toString() { - return "ClusterSetting{" + - "type='" + type + '\'' + - ", path='" + name + '\'' + - ", value='" + value + '\'' + - '}'; + return String.format("ClusterSetting{type='%s', path='%s', value='%s'}", type, name, value); } } @@ -438,16 +442,11 @@ protected String makeRequest(String query) { } protected String makeRequest(String query, int fetch_size) { - return String.format("{\n" + - " \"fetch_size\": \"%s\",\n" + - " \"query\": \"%s\"\n" + - "}", fetch_size, query); + return String.format("{ \"fetch_size\": \"%s\", \"query\": \"%s\" }", fetch_size, query); } protected String makeFetchLessRequest(String query) { - return String.format("{\n" + - " \"query\": \"%s\"\n" + - "}", query); + return String.format("{\n" + " \"query\": \"%s\"\n" + "}", query); } protected String makeCursorRequest(String cursor) { @@ -500,7 +499,6 @@ protected static Request getFetchDataSourceRequest(String name) { return request; } - protected static Request getDeleteDataSourceRequest(String name) { Request request = new Request("DELETE", "/_plugins/_query/_datasources" + "/" + name); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); @@ -509,175 +507,196 @@ protected static Request getDeleteDataSourceRequest(String name) { return request; } - /** - * Enum for associating test index with relevant mapping and data. - */ + /** Enum for associating test index with relevant mapping and data. 
*/ public enum Index { - ONLINE(TestsConstants.TEST_INDEX_ONLINE, - "online", - null, - "src/test/resources/online.json"), - ACCOUNT(TestsConstants.TEST_INDEX_ACCOUNT, + ONLINE(TestsConstants.TEST_INDEX_ONLINE, "online", null, "src/test/resources/online.json"), + ACCOUNT( + TestsConstants.TEST_INDEX_ACCOUNT, "account", getAccountIndexMapping(), "src/test/resources/accounts.json"), - PHRASE(TestsConstants.TEST_INDEX_PHRASE, + PHRASE( + TestsConstants.TEST_INDEX_PHRASE, "phrase", getPhraseIndexMapping(), "src/test/resources/phrases.json"), - DOG(TestsConstants.TEST_INDEX_DOG, - "dog", - getDogIndexMapping(), - "src/test/resources/dogs.json"), - DOGS2(TestsConstants.TEST_INDEX_DOG2, + DOG(TestsConstants.TEST_INDEX_DOG, "dog", getDogIndexMapping(), "src/test/resources/dogs.json"), + DOGS2( + TestsConstants.TEST_INDEX_DOG2, "dog", getDogs2IndexMapping(), "src/test/resources/dogs2.json"), - DOGS3(TestsConstants.TEST_INDEX_DOG3, + DOGS3( + TestsConstants.TEST_INDEX_DOG3, "dog", getDogs3IndexMapping(), "src/test/resources/dogs3.json"), - DOGSSUBQUERY(TestsConstants.TEST_INDEX_DOGSUBQUERY, + DOGSSUBQUERY( + TestsConstants.TEST_INDEX_DOGSUBQUERY, "dog", getDogIndexMapping(), "src/test/resources/dogsubquery.json"), - PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, - "people", - null, - "src/test/resources/peoples.json"), - PEOPLE2(TestsConstants.TEST_INDEX_PEOPLE2, + PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, "people", null, "src/test/resources/peoples.json"), + PEOPLE2( + TestsConstants.TEST_INDEX_PEOPLE2, "people", getPeople2IndexMapping(), "src/test/resources/people2.json"), - GAME_OF_THRONES(TestsConstants.TEST_INDEX_GAME_OF_THRONES, + GAME_OF_THRONES( + TestsConstants.TEST_INDEX_GAME_OF_THRONES, "gotCharacters", getGameOfThronesIndexMapping(), "src/test/resources/game_of_thrones_complex.json"), - SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, - "systems", - null, - "src/test/resources/systems.json"), - ODBC(TestsConstants.TEST_INDEX_ODBC, + SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, "systems", null, "src/test/resources/systems.json"), + ODBC( + TestsConstants.TEST_INDEX_ODBC, "odbc", getOdbcIndexMapping(), "src/test/resources/odbc-date-formats.json"), - LOCATION(TestsConstants.TEST_INDEX_LOCATION, + LOCATION( + TestsConstants.TEST_INDEX_LOCATION, "location", getLocationIndexMapping(), "src/test/resources/locations.json"), - LOCATION_TWO(TestsConstants.TEST_INDEX_LOCATION2, + LOCATION_TWO( + TestsConstants.TEST_INDEX_LOCATION2, "location2", getLocationIndexMapping(), "src/test/resources/locations2.json"), - NESTED(TestsConstants.TEST_INDEX_NESTED_TYPE, + NESTED( + TestsConstants.TEST_INDEX_NESTED_TYPE, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects.json"), - NESTED_WITHOUT_ARRAYS(TestsConstants.TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS, + NESTED_WITHOUT_ARRAYS( + TestsConstants.TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS, "nestedTypeWithoutArrays", getNestedTypeIndexMapping(), "src/test/resources/nested_objects_without_arrays.json"), - NESTED_WITH_QUOTES(TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, + NESTED_WITH_QUOTES( + TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects_quotes_in_values.json"), - EMPLOYEE_NESTED(TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, + EMPLOYEE_NESTED( + TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, "_doc", getEmployeeNestedTypeIndexMapping(), "src/test/resources/employee_nested.json"), - JOIN(TestsConstants.TEST_INDEX_JOIN_TYPE, + JOIN( + TestsConstants.TEST_INDEX_JOIN_TYPE, "joinType", 
getJoinTypeIndexMapping(), "src/test/resources/join_objects.json"), - UNEXPANDED_OBJECT(TestsConstants.TEST_INDEX_UNEXPANDED_OBJECT, + UNEXPANDED_OBJECT( + TestsConstants.TEST_INDEX_UNEXPANDED_OBJECT, "unexpandedObject", getUnexpandedObjectIndexMapping(), "src/test/resources/unexpanded_objects.json"), - BANK(TestsConstants.TEST_INDEX_BANK, + BANK( + TestsConstants.TEST_INDEX_BANK, "account", getBankIndexMapping(), "src/test/resources/bank.json"), - BANK_TWO(TestsConstants.TEST_INDEX_BANK_TWO, + BANK_TWO( + TestsConstants.TEST_INDEX_BANK_TWO, "account_two", getBankIndexMapping(), "src/test/resources/bank_two.json"), - BANK_WITH_NULL_VALUES(TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + BANK_WITH_NULL_VALUES( + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "account_null", getBankWithNullValuesIndexMapping(), "src/test/resources/bank_with_null_values.json"), - BANK_WITH_STRING_VALUES(TestsConstants.TEST_INDEX_STRINGS, + BANK_WITH_STRING_VALUES( + TestsConstants.TEST_INDEX_STRINGS, "strings", getStringIndexMapping(), "src/test/resources/strings.json"), - BANK_CSV_SANITIZE(TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE, + BANK_CSV_SANITIZE( + TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE, "account", getBankIndexMapping(), "src/test/resources/bank_csv_sanitize.json"), - BANK_RAW_SANITIZE(TestsConstants.TEST_INDEX_BANK_RAW_SANITIZE, - "account", - getBankIndexMapping(), - "src/test/resources/bank_raw_sanitize.json"), - ORDER(TestsConstants.TEST_INDEX_ORDER, + BANK_RAW_SANITIZE( + TestsConstants.TEST_INDEX_BANK_RAW_SANITIZE, + "account", + getBankIndexMapping(), + "src/test/resources/bank_raw_sanitize.json"), + ORDER( + TestsConstants.TEST_INDEX_ORDER, "_doc", getOrderIndexMapping(), "src/test/resources/order.json"), - WEBLOG(TestsConstants.TEST_INDEX_WEBLOG, + WEBLOG( + TestsConstants.TEST_INDEX_WEBLOG, "weblog", getWeblogsIndexMapping(), "src/test/resources/weblogs.json"), - DATE(TestsConstants.TEST_INDEX_DATE, + DATE( + TestsConstants.TEST_INDEX_DATE, "dates", getDateIndexMapping(), "src/test/resources/dates.json"), - DATETIME(TestsConstants.TEST_INDEX_DATE_TIME, + DATETIME( + TestsConstants.TEST_INDEX_DATE_TIME, "_doc", getDateTimeIndexMapping(), "src/test/resources/datetime.json"), - NESTED_SIMPLE(TestsConstants.TEST_INDEX_NESTED_SIMPLE, + NESTED_SIMPLE( + TestsConstants.TEST_INDEX_NESTED_SIMPLE, "_doc", getNestedSimpleIndexMapping(), "src/test/resources/nested_simple.json"), - DEEP_NESTED(TestsConstants.TEST_INDEX_DEEP_NESTED, + DEEP_NESTED( + TestsConstants.TEST_INDEX_DEEP_NESTED, "_doc", getDeepNestedIndexMapping(), "src/test/resources/deep_nested_index_data.json"), - DATA_TYPE_NUMERIC(TestsConstants.TEST_INDEX_DATATYPE_NUMERIC, + DATA_TYPE_NUMERIC( + TestsConstants.TEST_INDEX_DATATYPE_NUMERIC, "_doc", getDataTypeNumericIndexMapping(), "src/test/resources/datatypes_numeric.json"), - DATA_TYPE_NONNUMERIC(TestsConstants.TEST_INDEX_DATATYPE_NONNUMERIC, + DATA_TYPE_NONNUMERIC( + TestsConstants.TEST_INDEX_DATATYPE_NONNUMERIC, "_doc", getDataTypeNonnumericIndexMapping(), "src/test/resources/datatypes.json"), - BEER(TestsConstants.TEST_INDEX_BEER, - "beer", - null, - "src/test/resources/beer.stackexchange.json"), - NULL_MISSING(TestsConstants.TEST_INDEX_NULL_MISSING, + BEER( + TestsConstants.TEST_INDEX_BEER, "beer", null, "src/test/resources/beer.stackexchange.json"), + NULL_MISSING( + TestsConstants.TEST_INDEX_NULL_MISSING, "null_missing", getMappingFile("null_missing_index_mapping.json"), "src/test/resources/null_missing.json"), - CALCS(TestsConstants.TEST_INDEX_CALCS, + CALCS( + 
TestsConstants.TEST_INDEX_CALCS, "calcs", getMappingFile("calcs_index_mappings.json"), "src/test/resources/calcs.json"), - DATE_FORMATS(TestsConstants.TEST_INDEX_DATE_FORMATS, + DATE_FORMATS( + TestsConstants.TEST_INDEX_DATE_FORMATS, "date_formats", getMappingFile("date_formats_index_mapping.json"), "src/test/resources/date_formats.json"), - WILDCARD(TestsConstants.TEST_INDEX_WILDCARD, + WILDCARD( + TestsConstants.TEST_INDEX_WILDCARD, "wildcard", getMappingFile("wildcard_index_mappings.json"), "src/test/resources/wildcard.json"), - DATASOURCES(TestsConstants.DATASOURCES, + DATASOURCES( + TestsConstants.DATASOURCES, "datasource", getMappingFile("datasources_index_mappings.json"), "src/test/resources/datasources.json"), - MULTI_NESTED(TestsConstants.TEST_INDEX_MULTI_NESTED_TYPE, + MULTI_NESTED( + TestsConstants.TEST_INDEX_MULTI_NESTED_TYPE, "multi_nested", getMappingFile("multi_nested.json"), "src/test/resources/multi_nested_objects.json"), - NESTED_WITH_NULLS(TestsConstants.TEST_INDEX_NESTED_WITH_NULLS, + NESTED_WITH_NULLS( + TestsConstants.TEST_INDEX_NESTED_WITH_NULLS, "multi_nested", getNestedTypeIndexMapping(), "src/test/resources/nested_with_nulls.json"); @@ -709,7 +728,5 @@ public String getMapping() { public String getDataSet() { return this.dataSet; } - - } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java index b28336c482..fa86bbbc22 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -20,7 +19,8 @@ public class ShowIT extends SQLIntegTestCase { @Override protected void init() { - // Note: not using the existing TEST_INDEX_* indices, since underscore in the names causes issues + // Note: not using the existing TEST_INDEX_* indices, since underscore in the names causes + // issues createEmptyIndexIfNotExist("abcdefg"); createEmptyIndexIfNotExist("abcdefghijk"); createEmptyIndexIfNotExist("abcdijk"); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java index a6a1a1cfe9..bf288262b6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -31,32 +30,44 @@ protected void init() throws Exception { @Test public void includeTest() throws IOException { - SearchHits response = query(String.format( - "SELECT include('*name','*ge'),include('b*'),include('*ddre*'),include('gender') FROM %s LIMIT 1000", - TEST_INDEX_ACCOUNT)); + SearchHits response = + query( + String.format( + "SELECT include('*name','*ge'),include('b*'),include('*ddre*'),include('gender')" + + " FROM %s LIMIT 1000", + TEST_INDEX_ACCOUNT)); for (SearchHit hit : response.getHits()) { Set keySet = hit.getSourceAsMap().keySet(); for (String field : keySet) { - Assert.assertTrue(field.endsWith("name") || field.endsWith("ge") || field.startsWith("b") || - field.contains("ddre") || field.equals("gender")); + Assert.assertTrue( + field.endsWith("name") + || field.endsWith("ge") + || field.startsWith("b") + || field.contains("ddre") + || 
field.equals("gender")); } } - } @Test public void excludeTest() throws IOException { - SearchHits response = query(String.format( - "SELECT exclude('*name','*ge'),exclude('b*'),exclude('*ddre*'),exclude('gender') FROM %s LIMIT 1000", - TEST_INDEX_ACCOUNT)); + SearchHits response = + query( + String.format( + "SELECT exclude('*name','*ge'),exclude('b*'),exclude('*ddre*'),exclude('gender')" + + " FROM %s LIMIT 1000", + TEST_INDEX_ACCOUNT)); for (SearchHit hit : response.getHits()) { Set keySet = hit.getSourceAsMap().keySet(); for (String field : keySet) { Assert.assertFalse( - field.endsWith("name") || field.endsWith("ge") || field.startsWith("b") || - field.contains("ddre") || field.equals("gender")); + field.endsWith("name") + || field.endsWith("ge") + || field.startsWith("b") + || field.contains("ddre") + || field.equals("gender")); } } } @@ -64,15 +75,18 @@ public void excludeTest() throws IOException { @Test public void allTest() throws IOException { - SearchHits response = query(String.format( - "SELECT exclude('*name','*ge'),include('b*'),exclude('*ddre*'),include('gender') FROM %s LIMIT 1000", - TEST_INDEX_ACCOUNT)); + SearchHits response = + query( + String.format( + "SELECT exclude('*name','*ge'),include('b*'),exclude('*ddre*'),include('gender')" + + " FROM %s LIMIT 1000", + TEST_INDEX_ACCOUNT)); for (SearchHit hit : response.getHits()) { Set keySet = hit.getSourceAsMap().keySet(); for (String field : keySet) { - Assert - .assertFalse(field.endsWith("name") || field.endsWith("ge") || field.contains("ddre")); + Assert.assertFalse( + field.endsWith("name") || field.endsWith("ge") || field.contains("ddre")); Assert.assertTrue(field.startsWith("b") || field.equals("gender")); } } @@ -81,11 +95,11 @@ public void allTest() throws IOException { private SearchHits query(String query) throws IOException { final JSONObject jsonObject = executeQuery(query); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser).getHits(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java index 0fd0fea7f7..c1d656628f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.both; @@ -38,9 +37,7 @@ public class SubqueryIT extends SQLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Override protected void init() throws Exception { @@ -51,50 +48,55 @@ protected void init() throws Exception { @Test public void testIN() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname FROM %s B) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name " + + "FROM %s A " + + "WHERE holdersName IN (SELECT firstname FROM %s B) " + + "AND dog_name <> 
'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/A.dog_name", is("snoopy")), - kvString("/_source/A.dog_name", is("gogo")) - ) - ); + kvString("/_source/A.dog_name", is("gogo")))); } @Test public void testINWithAlias() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT A.dog_name " + - "FROM %s A " + - "WHERE A.holdersName IN (SELECT B.firstname FROM %s B) " + - "AND A.dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT A.dog_name " + + "FROM %s A " + + "WHERE A.holdersName IN (SELECT B.firstname FROM %s B) " + + "AND A.dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/A.dog_name", is("snoopy")), - kvString("/_source/A.dog_name", is("gogo")) - ) - ); + kvString("/_source/A.dog_name", is("gogo")))); } @Test public void testINSelectAll() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT * " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname FROM %s B) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s A " + + "WHERE holdersName IN (SELECT firstname FROM %s B) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( @@ -105,39 +107,38 @@ public void testINSelectAll() throws IOException { .and(kvInt("/_source/A.age", is(4))), both(kvString("/_source/A.dog_name", is("gogo"))) .and(kvString("/_source/A.holdersName", is("Gabrielle"))) - .and(kvInt("/_source/A.age", is(6))) - ) - ); + .and(kvInt("/_source/A.age", is(6))))); } @Test public void testINWithInnerWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname FROM %s B WHERE age <> 36) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name " + + "FROM %s A " + + "WHERE holdersName IN (SELECT firstname FROM %s B WHERE age <> 36) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/A.dog_name", is("gogo")) - ) - ); + assertThat(response, hitAll(kvString("/_source/A.dog_name", is("gogo")))); } @Test public void testNotSupportedQuery() throws IOException { exceptionRule.expect(ResponseException.class); exceptionRule.expectMessage("Unsupported subquery"); - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName NOT IN (SELECT firstname FROM %s B WHERE age <> 36) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name " + + "FROM %s A " + + "WHERE holdersName NOT IN (SELECT firstname FROM %s B WHERE age <> 36) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); executeQuery(query); } @@ -145,100 +146,91 @@ public void testNotSupportedQuery() throws IOException { @Ignore @Test public void testINWithDuplicate() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName 
IN (SELECT firstname FROM %s B)", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name FROM %s A WHERE holdersName IN (SELECT firstname FROM %s B)", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/A.dog_name", is("snoopy")), - kvString("/_source/A.dog_name", is("babala")) - ) - ); + kvString("/_source/A.dog_name", is("babala")))); } @Test public void nonCorrelatedExists() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p)", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name FROM %s as e WHERE EXISTS (SELECT * FROM e.projects as p)", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/name", is("Bob Smith")), - kvString("/_source/name", is("Jane Smith")) - ) - ); + kvString("/_source/name", is("Jane Smith")))); } @Test public void nonCorrelatedExistsWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/name", is("Bob Smith")) - ) - ); + assertThat(response, hitAll(kvString("/_source/name", is("Bob Smith")))); } @Test public void nonCorrelatedExistsParentWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + - "AND e.name LIKE 'jane'", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + + "AND e.name LIKE 'jane'", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/name", is("Jane Smith")) - ) - ); + assertThat(response, hitAll(kvString("/_source/name", is("Jane Smith")))); } @Test public void nonCorrelatedNotExists() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p)", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name FROM %s as e WHERE NOT EXISTS (SELECT * FROM e.projects as p)", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/name", is("Susan Smith")), - kvString("/_source/name", is("John Doe")) - ) - ); + kvString("/_source/name", is("John Doe")))); } @Test public void nonCorrelatedNotExistsWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 
'aurora')", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); assertThat( @@ -246,52 +238,55 @@ public void nonCorrelatedNotExistsWhere() throws IOException { hitAll( kvString("/_source/name", is("Susan Smith")), kvString("/_source/name", is("Jane Smith")), - kvString("/_source/name", is("John Doe")) - ) - ); + kvString("/_source/name", is("John Doe")))); } @Test public void nonCorrelatedNotExistsParentWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + - "AND e.name LIKE 'smith'", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + + "AND e.name LIKE 'smith'", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/name", is("Susan Smith")) - ) - ); + assertThat(response, hitAll(kvString("/_source/name", is("Susan Smith")))); } @Test public void selectFromSubqueryWithCountShouldPass() throws IOException { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count FROM (SELECT COUNT(*) as TEMP FROM %s) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/count/value"), equalTo(1000)); } @Test public void selectFromSubqueryWithWhereAndCountShouldPass() throws IOException { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s WHERE age > 30) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s WHERE age > 30) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/count/value"), equalTo(502)); } @Test public void selectFromSubqueryWithCountAndGroupByShouldPass() throws Exception { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY gender) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY gender) t", + TEST_INDEX_ACCOUNT)); assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = (JSONObject) result.query("/aggregations/gender"); @@ -312,11 +307,12 @@ public void selectFromSubqueryWithCountAndGroupByShouldPass() throws Exception { @Test public void selectFromSubqueryWithCountAndGroupByAndOrderByShouldPass() throws IOException { - JSONObject result = executeQuery( - StringUtils.format( - "SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY age ORDER BY TEMP) t", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY age ORDER BY TEMP) t", + TEST_INDEX_ACCOUNT)); JSONArray buckets = (JSONArray) result.query("/aggregations/age/buckets"); List countList = new ArrayList<>(); for (int i = 0; i < buckets.length(); ++i) { @@ -328,44 +324,50 @@ public void selectFromSubqueryWithCountAndGroupByAndOrderByShouldPass() throws I @Test public void 
selectFromSubqueryWithCountAndGroupByAndHavingShouldPass() throws Exception { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.T1 as g, t.T2 as c " + - "FROM (SELECT gender as T1, COUNT(*) as T2 " + - " FROM %s " + - " GROUP BY gender " + - " HAVING T2 > 500) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.T1 as g, t.T2 as c " + + "FROM (SELECT gender as T1, COUNT(*) as T2 " + + " FROM %s " + + " GROUP BY gender " + + " HAVING T2 > 500) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/g/buckets/0/c/value"), equalTo(507)); } @Test public void selectFromSubqueryCountAndSum() throws IOException { - JSONObject result = executeQuery( - StringUtils.format( - "SELECT t.TEMP1 as count, t.TEMP2 as balance " + - "FROM (SELECT COUNT(*) as TEMP1, SUM(balance) as TEMP2 " + - " FROM %s) t", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP1 as count, t.TEMP2 as balance " + + "FROM (SELECT COUNT(*) as TEMP1, SUM(balance) as TEMP2 " + + " FROM %s) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/count/value"), equalTo(1000)); - assertThat(((BigDecimal) result.query("/aggregations/balance/value")).doubleValue(), + assertThat( + ((BigDecimal) result.query("/aggregations/balance/value")).doubleValue(), closeTo(25714837.0, 0.01)); } @Test public void selectFromSubqueryWithoutAliasShouldPass() throws IOException { - JSONObject response = executeJdbcRequest( - StringUtils.format( - "SELECT a.firstname AS my_first, a.lastname AS my_last, a.age AS my_age " + - "FROM (SELECT firstname, lastname, age " + - "FROM %s " + - "WHERE age = 40 and account_number = 291) AS a", - TEST_INDEX_ACCOUNT)); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + StringUtils.format( + "SELECT a.firstname AS my_first, a.lastname AS my_last, a.age AS my_age " + + "FROM (SELECT firstname, lastname, age " + + "FROM %s " + + "WHERE age = 40 and account_number = 291) AS a", + TEST_INDEX_ACCOUNT)); + + verifySchema( + response, schema("firstname", "my_first", "text"), schema("lastname", "my_last", "text"), schema("age", "my_age", "long")); - verifyDataRows(response, - rows("Lynn", "Pollard", 40)); + verifyDataRows(response, rows("Lynn", "Pollard", 40)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java index fcc9b048c9..ab2808ee3f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -17,7 +16,6 @@ import org.opensearch.client.ResponseException; import org.opensearch.core.rest.RestStatus; - public class TermQueryExplainIT extends SQLIntegTestCase { @Override @@ -35,13 +33,15 @@ protected void init() throws Exception { @Test public void testNonExistingIndex() throws IOException { try { - explainQuery("SELECT firstname, lastname " + - "FROM opensearch_sql_test_fake_index " + - "WHERE firstname = 'Leo'"); + explainQuery( + "SELECT firstname, lastname " + + "FROM opensearch_sql_test_fake_index " + + "WHERE firstname = 'Leo'"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( 
+ e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("no such index")); @@ -52,13 +52,12 @@ public void testNonExistingIndex() throws IOException { @Test public void testNonResolvingIndexPattern() throws IOException { try { - explainQuery("SELECT * " + - "FROM opensearch_sql_test_blah_blah* " + - "WHERE firstname = 'Leo'"); + explainQuery("SELECT * FROM opensearch_sql_test_blah_blah* WHERE firstname = 'Leo'"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("Field [firstname] cannot be found or used here.")); @@ -68,10 +67,11 @@ public void testNonResolvingIndexPattern() throws IOException { @Test public void testNonResolvingIndexPatternWithExistingIndex() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch_sql_test_blah_blah*, opensearch-sql_test_index_bank " + - "WHERE state = 'DC'"); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch_sql_test_blah_blah*, opensearch-sql_test_index_bank " + + "WHERE state = 'DC'"); assertThat(result, containsString("\"term\":{\"state.keyword\"")); } @@ -79,12 +79,13 @@ public void testNonResolvingIndexPatternWithExistingIndex() throws IOException { public void testNonResolvingIndexPatternWithNonExistingIndex() throws IOException { try { explainQuery( - "SELECT firstname, lastname " + - "FROM opensearch_sql_test_blah_blah*, another_fake_index " + - "WHERE firstname = 'Leo'"); + "SELECT firstname, lastname " + + "FROM opensearch_sql_test_blah_blah*, another_fake_index " + + "WHERE firstname = 'Leo'"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("no such index")); @@ -95,11 +96,11 @@ public void testNonResolvingIndexPatternWithNonExistingIndex() throws IOExceptio @Test public void testNonCompatibleMappings() throws IOException { try { - explainQuery( - "SELECT * FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2"); + explainQuery("SELECT * FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("Field [holdersName] have conflict type")); @@ -108,14 +109,15 @@ public void testNonCompatibleMappings() throws IOException { } /** - * The dog_name field has same type in dog and dog2 index. - * But, the holdersName field has different type. + * The dog_name field has same type in dog and dog2 index. But, the holdersName field has + * different type. 
*/ @Test public void testNonCompatibleMappingsButTheFieldIsNotUsed() throws IOException { - String result = explainQuery( - "SELECT dog_name " + - "FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2 WHERE dog_name = 'dog'"); + String result = + explainQuery( + "SELECT dog_name FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2" + + " WHERE dog_name = 'dog'"); System.out.println(result); assertThat(result, containsString("dog_name")); assertThat(result, containsString("_source")); @@ -123,20 +125,21 @@ public void testNonCompatibleMappingsButTheFieldIsNotUsed() throws IOException { @Test public void testEqualFieldMappings() throws IOException { - String result = explainQuery( - "SELECT color " + - "FROM opensearch-sql_test_index_dog2, opensearch-sql_test_index_dog3"); + String result = + explainQuery( + "SELECT color " + + "FROM opensearch-sql_test_index_dog2, opensearch-sql_test_index_dog3"); assertThat(result, containsString("color")); assertThat(result, containsString("_source")); } @Test public void testIdenticalMappings() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank, opensearch-sql_test_index_bank_two " + - "WHERE state = 'WA' OR male = true" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank, opensearch-sql_test_index_bank_two " + + "WHERE state = 'WA' OR male = true"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, containsString("_source")); @@ -144,24 +147,23 @@ public void testIdenticalMappings() throws IOException { @Test public void testIdenticalMappingsWithTypes() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank/account, opensearch-sql_test_index_bank_two/account_two " + - "WHERE state = 'WA' OR male = true" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state FROM opensearch-sql_test_index_bank/account," + + " opensearch-sql_test_index_bank_two/account_two WHERE state = 'WA' OR male =" + + " true"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, containsString("_source")); } - @Test public void testIdenticalMappingsWithPartialType() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank/account, opensearch-sql_test_index_bank_two " + - "WHERE state = 'WA' OR male = true" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank/account, opensearch-sql_test_index_bank_two " + + "WHERE state = 'WA' OR male = true"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, containsString("_source")); @@ -170,22 +172,22 @@ public void testIdenticalMappingsWithPartialType() throws IOException { @Test public void testTextFieldOnly() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank " + - "WHERE firstname = 'Abbas'" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank " + + "WHERE firstname = 'Abbas'"); assertThat(result, containsString("term")); assertThat(result, not(containsString("firstname."))); } @Test 
public void testTextAndKeywordAppendsKeywordAlias() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank " + - "WHERE state = 'WA' OR lastname = 'Chen'" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank " + + "WHERE state = 'WA' OR lastname = 'Chen'"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, not(containsString("lastname."))); @@ -194,8 +196,7 @@ public void testTextAndKeywordAppendsKeywordAlias() throws IOException { @Test public void testBooleanFieldNoKeywordAlias() throws IOException { - String result = - explainQuery("SELECT * FROM opensearch-sql_test_index_bank WHERE male = false"); + String result = explainQuery("SELECT * FROM opensearch-sql_test_index_bank WHERE male = false"); assertThat(result, containsString("term")); assertThat(result, not(containsString("male."))); } @@ -203,8 +204,8 @@ public void testBooleanFieldNoKeywordAlias() throws IOException { @Test public void testDateFieldNoKeywordAlias() throws IOException { - String result = explainQuery( - "SELECT * FROM opensearch-sql_test_index_bank WHERE birthdate = '2018-08-19'"); + String result = + explainQuery("SELECT * FROM opensearch-sql_test_index_bank WHERE birthdate = '2018-08-19'"); assertThat(result, containsString("term")); assertThat(result, not(containsString("birthdate."))); } @@ -218,11 +219,11 @@ public void testNumberNoKeywordAlias() throws IOException { @Test public void inTestInWhere() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank " + - "WHERE state IN ('WA' , 'PA' , 'TN')" - ); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch-sql_test_index_bank " + + "WHERE state IN ('WA' , 'PA' , 'TN')"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @@ -230,53 +231,51 @@ public void inTestInWhere() throws IOException { @Test @Ignore // TODO: enable when subqueries are fixed public void inTestInWhereSubquery() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank/account WHERE " + - "state IN (SELECT state FROM opensearch-sql_test_index_bank WHERE city = 'Nicholson')" - ); + String result = + explainQuery( + "SELECT * FROM opensearch-sql_test_index_bank/account WHERE state IN (SELECT state FROM" + + " opensearch-sql_test_index_bank WHERE city = 'Nicholson')"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @Test public void testKeywordAliasGroupBy() throws IOException { - String result = explainQuery( - "SELECT firstname, state " + - "FROM opensearch-sql_test_index_bank/account " + - "GROUP BY firstname, state"); + String result = + explainQuery( + "SELECT firstname, state " + + "FROM opensearch-sql_test_index_bank/account " + + "GROUP BY firstname, state"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @Test public void testKeywordAliasGroupByUsingTableAlias() throws IOException { - String result = explainQuery( - "SELECT a.firstname, a.state " + - "FROM opensearch-sql_test_index_bank/account a " + - "GROUP BY a.firstname, a.state"); + String result = + explainQuery( + "SELECT a.firstname, a.state " + + "FROM opensearch-sql_test_index_bank/account a " + + "GROUP BY a.firstname, a.state"); assertThat(result, 
containsString("term")); assertThat(result, containsString("state.keyword")); } @Test public void testKeywordAliasOrderBy() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank " + - "ORDER BY state, lastname " - ); + String result = + explainQuery("SELECT * FROM opensearch-sql_test_index_bank ORDER BY state, lastname "); assertThat(result, containsString("\"state.keyword\":{\"order\":\"asc\"")); assertThat(result, containsString("\"lastname\":{\"order\":\"asc\"}")); } @Test public void testKeywordAliasOrderByUsingTableAlias() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank b " + - "ORDER BY b.state, b.lastname " - ); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch-sql_test_index_bank b " + + "ORDER BY b.state, b.lastname "); assertThat(result, containsString("\"state.keyword\":{\"order\":\"asc\"")); assertThat(result, containsString("\"lastname\":{\"order\":\"asc\"}")); } @@ -286,13 +285,13 @@ public void testKeywordAliasOrderByUsingTableAlias() throws IOException { public void testJoinWhere() throws IOException { String expectedOutput = TestUtils.fileToString("src/test/resources/expectedOutput/term_join_where", true); - String result = explainQuery( - "SELECT a.firstname, a.lastname , b.city " + - "FROM opensearch-sql_test_index_account a " + - "JOIN opensearch-sql_test_index_account b " + - "ON a.city = b.city " + - "WHERE a.city IN ('Nicholson', 'Yardville')" - ); + String result = + explainQuery( + "SELECT a.firstname, a.lastname , b.city " + + "FROM opensearch-sql_test_index_account a " + + "JOIN opensearch-sql_test_index_account b " + + "ON a.city = b.city " + + "WHERE a.city IN ('Nicholson', 'Yardville')"); assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @@ -301,57 +300,56 @@ public void testJoinWhere() throws IOException { public void testJoinAliasMissing() throws IOException { try { explainQuery( - "SELECT a.firstname, a.lastname , b.city " + - "FROM opensearch-sql_test_index_account a " + - "JOIN opensearch-sql_test_index_account b " + - "ON a.city = b.city " + - "WHERE city IN ('Nicholson', 'Yardville')" - ); + "SELECT a.firstname, a.lastname , b.city " + + "FROM opensearch-sql_test_index_account a " + + "JOIN opensearch-sql_test_index_account b " + + "ON a.city = b.city " + + "WHERE city IN ('Nicholson', 'Yardville')"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("Field name [city] is ambiguous")); assertThat(entity, containsString("\"type\": \"VerificationException\"")); } - } @Test public void testNestedSingleConditionAllFields() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "WHERE p.name = 'something' " - ); - assertThat(result, - containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "WHERE p.name = 'something' "); + assertThat( + result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); assertThat(result, 
containsString("\"path\":\"projects\"")); } @Test public void testNestedMultipleCondition() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "WHERE p.name = 'something' and p.started_year = 1990 " - ); - assertThat(result, - containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "WHERE p.name = 'something' and p.started_year = 1990 "); + assertThat( + result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); assertThat(result, containsString("\"term\":{\"projects.started_year\":{\"value\":1990")); assertThat(result, containsString("\"path\":\"projects\"")); } @Test public void testConditionsOnDifferentNestedDocs() throws IOException { - String result = explainQuery( - "SELECT p.name, c.likes " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p, e.comments c " + - "WHERE p.name = 'something' or c.likes = 56 " - ); - assertThat(result, - containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); + String result = + explainQuery( + "SELECT p.name, c.likes " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p, e.comments c " + + "WHERE p.name = 'something' or c.likes = 56 "); + assertThat( + result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); assertThat(result, containsString("\"term\":{\"comments.likes\":{\"value\":56")); assertThat(result, containsString("\"path\":\"projects\"")); assertThat(result, containsString("\"path\":\"comments\"")); @@ -359,11 +357,11 @@ public void testConditionsOnDifferentNestedDocs() throws IOException { @Test public void testNestedSingleConditionSpecificFields() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "WHERE p.name = 'hello' or p.name = 'world' " - ); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "WHERE p.name = 'hello' or p.name = 'world' "); assertThat(result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"hello\"")); assertThat(result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"world\"")); assertThat(result, containsString("\"path\":\"projects\"")); @@ -371,32 +369,33 @@ public void testNestedSingleConditionSpecificFields() throws IOException { @Test public void testNestedSingleGroupBy() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "GROUP BY p.name "); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "GROUP BY p.name "); assertThat(result, containsString("\"terms\":{\"field\":\"projects.name.keyword\"")); assertThat(result, containsString("\"nested\":{\"path\":\"projects\"")); } @Test public void testNestedSingleOrderBy() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "ORDER BY p.name " - ); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "ORDER BY p.name "); 
assertThat(result, containsString("\"sort\":[{\"projects.name.keyword\"")); assertThat(result, containsString("\"nested\":{\"path\":\"projects\"")); } @Test public void testNestedIsNotNullExplain() throws IOException { - String explain = explainQuery( - "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + - "WHERE p IS NOT NULL" - ); + String explain = + explainQuery( + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + + "WHERE p IS NOT NULL"); assertThat(explain, containsString("\"exists\":{\"field\":\"projects\"")); assertThat(explain, containsString("\"path\":\"projects\"")); @@ -407,14 +406,15 @@ public void testNestedIsNotNullExplain() throws IOException { public void testMultiQuery() throws IOException { String expectedOutput = TestUtils.fileToString("src/test/resources/expectedOutput/term_union_where", true); - String result = explainQuery( - "SELECT firstname " + - "FROM opensearch-sql_test_index_account/account " + - "WHERE firstname = 'Amber' " + - "UNION ALL " + - "SELECT dog_name as firstname " + - "FROM opensearch-sql_test_index_dog/dog " + - "WHERE holdersName = 'Hattie' OR dog_name = 'rex'"); + String result = + explainQuery( + "SELECT firstname " + + "FROM opensearch-sql_test_index_account/account " + + "WHERE firstname = 'Amber' " + + "UNION ALL " + + "SELECT dog_name as firstname " + + "FROM opensearch-sql_test_index_dog/dog " + + "WHERE holdersName = 'Hattie' OR dog_name = 'rex'"); assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java index 30cee86e15..65cacf16d2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static com.google.common.base.Strings.isNullOrEmpty; @@ -36,14 +35,14 @@ public class TestUtils { - private final static String MAPPING_FILE_PATH = "src/test/resources/indexDefinitions/"; + private static final String MAPPING_FILE_PATH = "src/test/resources/indexDefinitions/"; /** * Create test index by REST client. * - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ public static void createIndexByRestClient(RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); @@ -54,16 +53,16 @@ public static void createIndexByRestClient(RestClient client, String indexName, } /** - * https://github.com/elastic/elasticsearch/pull/49959 - * Deprecate creation of dot-prefixed index names except for hidden and system indices. - * Create hidden index by REST client. + * https://github.com/elastic/elasticsearch/pull/49959
+ * Deprecate creation of dot-prefixed index names except for hidden and system indices. Create + * hidden index by REST client. * - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ - public static void createHiddenIndexByRestClient(RestClient client, String indexName, - String mapping) { + public static void createHiddenIndexByRestClient( + RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); JSONObject jsonObject = isNullOrEmpty(mapping) ? new JSONObject() : new JSONObject(mapping); jsonObject.put("settings", new JSONObject("{\"index\":{\"hidden\":true}}")); @@ -73,11 +72,10 @@ public static void createHiddenIndexByRestClient(RestClient client, String index } /** - * Check if index already exists by OpenSearch index exists API which returns: - * 200 - specified indices or aliases exist - * 404 - one or more indices specified or aliases do not exist + * Check if index already exists by OpenSearch index exists API which returns: 200 - specified + * indices or aliases exist 404 - one or more indices specified or aliases do not exist * - * @param client client connection + * @param client client connection * @param indexName index name * @return true for index exist */ @@ -93,13 +91,13 @@ public static boolean isIndexExist(RestClient client, String indexName) { /** * Load test data set by REST client. * - * @param client client connection - * @param indexName index name + * @param client client connection + * @param indexName index name * @param dataSetFilePath file path of test data set * @throws IOException */ - public static void loadDataByRestClient(RestClient client, String indexName, - String dataSetFilePath) throws IOException { + public static void loadDataByRestClient( + RestClient client, String indexName, String dataSetFilePath) throws IOException { Path path = Paths.get(getResourceFilePath(dataSetFilePath)); Request request = new Request("POST", "/" + indexName + "/_bulk?refresh=true"); request.setJsonEntity(new String(Files.readAllBytes(path))); @@ -109,7 +107,7 @@ public static void loadDataByRestClient(RestClient client, String indexName, /** * Perform a request by REST client. 
* - * @param client client connection + * @param client client connection * @param request request object */ public static Response performRequest(RestClient client, Request request) { @@ -177,7 +175,6 @@ public static String getEmployeeNestedTypeIndexMapping() { return getMappingFile(mappingFile); } - public static String getNestedTypeIndexMapping() { String mappingFile = "nested_type_index_mapping.json"; return getMappingFile(mappingFile); @@ -255,8 +252,8 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkRequest bulkRequest = new BulkRequest(); try (final InputStream stream = new FileInputStream(absJsonPath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { while (true) { @@ -285,8 +282,11 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); if (bulkResponse.hasFailures()) { - throw new Exception("Failed to load test data into index " + defaultIndex + ", " + - bulkResponse.buildFailureMessage()); + throw new Exception( + "Failed to load test data into index " + + defaultIndex + + ", " + + bulkResponse.buildFailureMessage()); } System.out.println(bulkResponse.getItems().length + " documents loaded."); // ensure the documents are searchable @@ -312,8 +312,8 @@ public static String getResponseBody(Response response, boolean retainNewLines) final StringBuilder sb = new StringBuilder(); try (final InputStream is = response.getEntity().getContent(); - final BufferedReader br = new BufferedReader( - new InputStreamReader(is, StandardCharsets.UTF_8))) { + final BufferedReader br = + new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { @@ -326,15 +326,14 @@ public static String getResponseBody(Response response, boolean retainNewLines) return sb.toString(); } - public static String fileToString(final String filePathFromProjectRoot, - final boolean removeNewLines) - throws IOException { + public static String fileToString( + final String filePathFromProjectRoot, final boolean removeNewLines) throws IOException { final String absolutePath = getResourceFilePath(filePathFromProjectRoot); try (final InputStream stream = new FileInputStream(absolutePath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { final StringBuilder stringBuilder = new StringBuilder(); String line = br.readLine(); @@ -388,12 +387,16 @@ public static List> getPermutations(final List items) { } final String currentItem = items.get(i); - result.addAll(getPermutations(smallerSet).stream().map(smallerSetPermutation -> { - final List permutation = new ArrayList<>(); - permutation.add(currentItem); - permutation.addAll(smallerSetPermutation); - return permutation; - }).collect(Collectors.toCollection(LinkedList::new))); + result.addAll( + getPermutations(smallerSet).stream() + .map( + smallerSetPermutation -> { + final List permutation = new ArrayList<>(); + permutation.add(currentItem); + permutation.addAll(smallerSetPermutation); + return 
permutation; + }) + .collect(Collectors.toCollection(LinkedList::new))); } return result; diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java index 338be25a0c..29bc9813fa 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java @@ -3,66 +3,63 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; -/** - * Created by omershelef on 18/12/14. - */ +/** Created by omershelef on 18/12/14. */ public class TestsConstants { - public final static String PERSISTENT = "persistent"; - public final static String TRANSIENT = "transient"; + public static final String PERSISTENT = "persistent"; + public static final String TRANSIENT = "transient"; - public final static String TEST_INDEX = "opensearch-sql_test_index"; + public static final String TEST_INDEX = "opensearch-sql_test_index"; - public final static String TEST_INDEX_ONLINE = TEST_INDEX + "_online"; - public final static String TEST_INDEX_ACCOUNT = TEST_INDEX + "_account"; - public final static String TEST_INDEX_PHRASE = TEST_INDEX + "_phrase"; - public final static String TEST_INDEX_DOG = TEST_INDEX + "_dog"; - public final static String TEST_INDEX_DOG2 = TEST_INDEX + "_dog2"; - public final static String TEST_INDEX_DOG3 = TEST_INDEX + "_dog3"; - public final static String TEST_INDEX_DOGSUBQUERY = TEST_INDEX + "_subquery"; - public final static String TEST_INDEX_PEOPLE = TEST_INDEX + "_people"; - public final static String TEST_INDEX_PEOPLE2 = TEST_INDEX + "_people2"; - public final static String TEST_INDEX_GAME_OF_THRONES = TEST_INDEX + "_game_of_thrones"; - public final static String TEST_INDEX_SYSTEM = TEST_INDEX + "_system"; - public final static String TEST_INDEX_ODBC = TEST_INDEX + "_odbc"; - public final static String TEST_INDEX_LOCATION = TEST_INDEX + "_location"; - public final static String TEST_INDEX_LOCATION2 = TEST_INDEX + "_location2"; - public final static String TEST_INDEX_NESTED_TYPE = TEST_INDEX + "_nested_type"; - public final static String TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS = + public static final String TEST_INDEX_ONLINE = TEST_INDEX + "_online"; + public static final String TEST_INDEX_ACCOUNT = TEST_INDEX + "_account"; + public static final String TEST_INDEX_PHRASE = TEST_INDEX + "_phrase"; + public static final String TEST_INDEX_DOG = TEST_INDEX + "_dog"; + public static final String TEST_INDEX_DOG2 = TEST_INDEX + "_dog2"; + public static final String TEST_INDEX_DOG3 = TEST_INDEX + "_dog3"; + public static final String TEST_INDEX_DOGSUBQUERY = TEST_INDEX + "_subquery"; + public static final String TEST_INDEX_PEOPLE = TEST_INDEX + "_people"; + public static final String TEST_INDEX_PEOPLE2 = TEST_INDEX + "_people2"; + public static final String TEST_INDEX_GAME_OF_THRONES = TEST_INDEX + "_game_of_thrones"; + public static final String TEST_INDEX_SYSTEM = TEST_INDEX + "_system"; + public static final String TEST_INDEX_ODBC = TEST_INDEX + "_odbc"; + public static final String TEST_INDEX_LOCATION = TEST_INDEX + "_location"; + public static final String TEST_INDEX_LOCATION2 = TEST_INDEX + "_location2"; + public static final String TEST_INDEX_NESTED_TYPE = TEST_INDEX + "_nested_type"; + public static final String TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS = TEST_INDEX + "_nested_type_without_arrays"; - public final static String TEST_INDEX_NESTED_SIMPLE = TEST_INDEX + "_nested_simple"; - public final 
static String TEST_INDEX_NESTED_WITH_QUOTES =
+ public static final String TEST_INDEX_NESTED_SIMPLE = TEST_INDEX + "_nested_simple";
+ public static final String TEST_INDEX_NESTED_WITH_QUOTES = TEST_INDEX + "_nested_type_with_quotes";
- public final static String TEST_INDEX_EMPLOYEE_NESTED = TEST_INDEX + "_employee_nested";
- public final static String TEST_INDEX_JOIN_TYPE = TEST_INDEX + "_join_type";
- public final static String TEST_INDEX_UNEXPANDED_OBJECT = TEST_INDEX + "_unexpanded_object";
- public final static String TEST_INDEX_BANK = TEST_INDEX + "_bank";
- public final static String TEST_INDEX_BANK_TWO = TEST_INDEX_BANK + "_two";
- public final static String TEST_INDEX_BANK_WITH_NULL_VALUES =
+ public static final String TEST_INDEX_EMPLOYEE_NESTED = TEST_INDEX + "_employee_nested";
+ public static final String TEST_INDEX_JOIN_TYPE = TEST_INDEX + "_join_type";
+ public static final String TEST_INDEX_UNEXPANDED_OBJECT = TEST_INDEX + "_unexpanded_object";
+ public static final String TEST_INDEX_BANK = TEST_INDEX + "_bank";
+ public static final String TEST_INDEX_BANK_TWO = TEST_INDEX_BANK + "_two";
+ public static final String TEST_INDEX_BANK_WITH_NULL_VALUES = TEST_INDEX_BANK + "_with_null_values";
- public final static String TEST_INDEX_BANK_CSV_SANITIZE = TEST_INDEX_BANK + "_csv_sanitize";
- public final static String TEST_INDEX_BANK_RAW_SANITIZE = TEST_INDEX_BANK + "_raw_sanitize";
- public final static String TEST_INDEX_ORDER = TEST_INDEX + "_order";
- public final static String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog";
- public final static String TEST_INDEX_DATE = TEST_INDEX + "_date";
- public final static String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime";
- public final static String TEST_INDEX_DEEP_NESTED = TEST_INDEX + "_deep_nested";
- public final static String TEST_INDEX_STRINGS = TEST_INDEX + "_strings";
- public final static String TEST_INDEX_DATATYPE_NUMERIC = TEST_INDEX + "_datatypes_numeric";
- public final static String TEST_INDEX_DATATYPE_NONNUMERIC = TEST_INDEX + "_datatypes_nonnumeric";
- public final static String TEST_INDEX_BEER = TEST_INDEX + "_beer";
- public final static String TEST_INDEX_NULL_MISSING = TEST_INDEX + "_null_missing";
- public final static String TEST_INDEX_CALCS = TEST_INDEX + "_calcs";
- public final static String TEST_INDEX_DATE_FORMATS = TEST_INDEX + "_date_formats";
- public final static String TEST_INDEX_WILDCARD = TEST_INDEX + "_wildcard";
- public final static String TEST_INDEX_MULTI_NESTED_TYPE = TEST_INDEX + "_multi_nested";
- public final static String TEST_INDEX_NESTED_WITH_NULLS = TEST_INDEX + "_nested_with_nulls";
- public final static String DATASOURCES = ".ql-datasources";
+ public static final String TEST_INDEX_BANK_CSV_SANITIZE = TEST_INDEX_BANK + "_csv_sanitize";
+ public static final String TEST_INDEX_BANK_RAW_SANITIZE = TEST_INDEX_BANK + "_raw_sanitize";
+ public static final String TEST_INDEX_ORDER = TEST_INDEX + "_order";
+ public static final String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog";
+ public static final String TEST_INDEX_DATE = TEST_INDEX + "_date";
+ public static final String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime";
+ public static final String TEST_INDEX_DEEP_NESTED = TEST_INDEX + "_deep_nested";
+ public static final String TEST_INDEX_STRINGS = TEST_INDEX + "_strings";
+ public static final String TEST_INDEX_DATATYPE_NUMERIC = TEST_INDEX + "_datatypes_numeric";
+ public static final String TEST_INDEX_DATATYPE_NONNUMERIC = TEST_INDEX + "_datatypes_nonnumeric";
+ public static final String
TEST_INDEX_BEER = TEST_INDEX + "_beer"; + public static final String TEST_INDEX_NULL_MISSING = TEST_INDEX + "_null_missing"; + public static final String TEST_INDEX_CALCS = TEST_INDEX + "_calcs"; + public static final String TEST_INDEX_DATE_FORMATS = TEST_INDEX + "_date_formats"; + public static final String TEST_INDEX_WILDCARD = TEST_INDEX + "_wildcard"; + public static final String TEST_INDEX_MULTI_NESTED_TYPE = TEST_INDEX + "_multi_nested"; + public static final String TEST_INDEX_NESTED_WITH_NULLS = TEST_INDEX + "_nested_with_nulls"; + public static final String DATASOURCES = ".ql-datasources"; - public final static String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - public final static String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; - public final static String SIMPLE_DATE_FORMAT = "yyyy-MM-dd"; + public static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; + public static final String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; + public static final String SIMPLE_DATE_FORMAT = "yyyy-MM-dd"; } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java index 646a38b011..421aae9622 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.util.MatcherUtils.schema; @@ -26,8 +25,8 @@ protected void init() throws Exception { @Test public void testAbsWithIntFieldReturnsInt() { JSONObject response = - executeJdbcRequest("SELECT ABS(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age LIMIT 5"); + executeJdbcRequest( + "SELECT ABS(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY age LIMIT 5"); verifySchema(response, schema("ABS(age)", null, "long")); } @@ -35,8 +34,10 @@ public void testAbsWithIntFieldReturnsInt() { @Test public void testCeilWithLongFieldReturnsLong() { JSONObject response = - executeJdbcRequest("SELECT CEIL(balance) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY balance LIMIT 5"); + executeJdbcRequest( + "SELECT CEIL(balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY balance LIMIT 5"); verifySchema(response, schema("CEIL(balance)", null, "long")); } @@ -46,8 +47,8 @@ public void testCeilWithLongFieldReturnsLong() { */ @Test public void testPiReturnsDouble() { - JSONObject response = executeJdbcRequest("SELECT PI() FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " LIMIT 1"); + JSONObject response = + executeJdbcRequest("SELECT PI() FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " LIMIT 1"); verifySchema(response, schema("PI()", null, "double")); } @@ -57,16 +58,22 @@ public void testPiReturnsDouble() { */ @Test public void testUpperWithStringFieldReturnsString() { - JSONObject response = executeJdbcRequest("SELECT UPPER(firstname) AS firstname_alias FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname_alias LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT UPPER(firstname) AS firstname_alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname_alias LIMIT 2"); verifySchema(response, schema("UPPER(firstname)", "firstname_alias", "keyword")); } @Test public void testLowerWithTextFieldReturnsText() { - JSONObject response = executeJdbcRequest("SELECT LOWER(firstname) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY 
firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT LOWER(firstname) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("LOWER(firstname)", null, "keyword")); } @@ -76,8 +83,11 @@ public void testLowerWithTextFieldReturnsText() { */ @Test public void testLengthWithTextFieldReturnsInt() { - JSONObject response = executeJdbcRequest("SELECT length(firstname) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT length(firstname) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("length(firstname)", null, "integer")); } @@ -85,8 +95,10 @@ public void testLengthWithTextFieldReturnsInt() { @Test public void testLengthWithGroupByExpr() { JSONObject response = - executeJdbcRequest("SELECT Length(firstname) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " GROUP BY LENGTH(firstname) LIMIT 5"); + executeJdbcRequest( + "SELECT Length(firstname) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY LENGTH(firstname) LIMIT 5"); verifySchema(response, schema("Length(firstname)", null, "integer")); } @@ -96,16 +108,22 @@ public void testLengthWithGroupByExpr() { */ @Test public void testSinWithLongFieldReturnsDouble() { - JSONObject response = executeJdbcRequest("SELECT sin(balance) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT sin(balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("sin(balance)", null, "double")); } @Test public void testRadiansWithLongFieldReturnsDouble() { - JSONObject response = executeJdbcRequest("SELECT radians(balance) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT radians(balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("radians(balance)", null, "double")); } @@ -115,16 +133,22 @@ public void testRadiansWithLongFieldReturnsDouble() { */ @Test public void testAddWithIntReturnsInt() { - JSONObject response = executeJdbcRequest("SELECT (balance + 5) AS balance_add_five FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT (balance + 5) AS balance_add_five FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("(balance + 5)", "balance_add_five", "long")); } @Test public void testSubtractLongWithLongReturnsLong() { - JSONObject response = executeJdbcRequest("SELECT (balance - balance) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT (balance - balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("(balance - balance)", null, "long")); } @@ -134,17 +158,18 @@ public void testSubtractLongWithLongReturnsLong() { */ @Test public void testDayOfWeekWithKeywordReturnsText() { - JSONObject response = executeJdbcRequest("SELECT DAYOFWEEK(insert_time) FROM " - + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT DAYOFWEEK(insert_time) FROM " + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); - verifySchema(response, - 
schema("DAYOFWEEK(insert_time)", null, "integer")); + verifySchema(response, schema("DAYOFWEEK(insert_time)", null, "integer")); } @Test public void testYearWithKeywordReturnsText() { - JSONObject response = executeJdbcRequest("SELECT YEAR(insert_time) FROM " - + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT YEAR(insert_time) FROM " + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); verifySchema(response, schema("YEAR(insert_time)", null, "integer")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java index 1a244bed85..105669c7ca 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java @@ -1,182 +1,190 @@ - /* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.sql.ppl; - import org.json.JSONObject; - import org.junit.Test; - - import java.io.IOException; - - import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE; - import static org.opensearch.sql.util.MatcherUtils.rows; - import static org.opensearch.sql.util.MatcherUtils.schema; - import static org.opensearch.sql.util.MatcherUtils.verifySchema; - import static org.opensearch.sql.util.MatcherUtils.verifySome; - - public class ConvertTZFunctionIT extends PPLIntegTestCase { - - - @Override - public void init() throws IOException { - loadIndex(Index.DATE); - } - - - @Test - public void inRangeZeroToPositive() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); - } - - @Test - public void inRangeZeroToZero() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); - } - - @Test - public void inRangePositiveToPositive() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); - } - - @Test - public void inRangeNegativeToPositive() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); - } - - @Test - public void inRangeNoTZChange() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); - } - - @Test - public void 
inRangeTwentyFourHourChange() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); - } - - @Test - public void inRangeFifteenMinuteTZ() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); - } - - @Test - public void nullFromFieldUnder() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullToFieldOver() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullFromGarbageInput1() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','test') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullFromGarbageInput2() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021test','-12:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullDateTimeInvalidDateValueFebruary() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-02-30 10:00:00','+00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullDateTimeInvalidDateValueApril() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-04-31 10:00:00','+00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullDateTimeInvalidDateValueMonth() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-13-03 10:00:00','+00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE; +import static org.opensearch.sql.util.MatcherUtils.rows; +import static org.opensearch.sql.util.MatcherUtils.schema; +import static org.opensearch.sql.util.MatcherUtils.verifySchema; 
+import static org.opensearch.sql.util.MatcherUtils.verifySome; +import java.io.IOException; +import org.json.JSONObject; +import org.junit.Test; + +public class ConvertTZFunctionIT extends PPLIntegTestCase { + + @Override + public void init() throws IOException { + loadIndex(Index.DATE); + } + + @Test + public void inRangeZeroToPositive() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); + } + + @Test + public void inRangeZeroToZero() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); + } + + @Test + public void inRangePositiveToPositive() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); + } + + @Test + public void inRangeNegativeToPositive() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); + } + + @Test + public void inRangeNoTZChange() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); + } + + @Test + public void inRangeTwentyFourHourChange() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); + } + + @Test + public void inRangeFifteenMinuteTZ() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); + } + + @Test + public void nullFromFieldUnder() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullToFieldOver() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields" + + " f", + TEST_INDEX_DATE)); + 
verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullFromGarbageInput1() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','test') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullFromGarbageInput2() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021test','-12:00','+00:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullDateTimeInvalidDateValueFebruary() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-02-30 10:00:00','+00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullDateTimeInvalidDateValueApril() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-04-31 10:00:00','+00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullDateTimeInvalidDateValueMonth() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-13-03 10:00:00','+00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java index 9f3fc36bde..f89c40f4d0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -29,7 +28,7 @@ public class CrossClusterSearchIT extends PPLIntegTestCase { var remote = "remoteCluster"; for (var cluster : clusterNames) { if (cluster.startsWith("remote")) { - remote = cluster; + remote = cluster; } } REMOTE_CLUSTER = remote; @@ -37,10 +36,11 @@ public class CrossClusterSearchIT extends PPLIntegTestCase { public static final String REMOTE_CLUSTER; - private final static String TEST_INDEX_BANK_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_BANK; - private final static String TEST_INDEX_DOG_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; - private final static String TEST_INDEX_DOG_MATCH_ALL_REMOTE = MATCH_ALL_REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; - private final static String TEST_INDEX_ACCOUNT_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_ACCOUNT; + private static final String TEST_INDEX_BANK_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_BANK; + private static final String TEST_INDEX_DOG_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; + private static final String 
TEST_INDEX_DOG_MATCH_ALL_REMOTE = + MATCH_ALL_REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; + private static final String TEST_INDEX_ACCOUNT_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_ACCOUNT; private static boolean initialized = false; @@ -71,32 +71,39 @@ public void testCrossClusterSearchAllFields() throws IOException { @Test public void testMatchAllCrossClusterSearchAllFields() throws IOException { - JSONObject result = executeQuery(String.format("search source=%s", TEST_INDEX_DOG_MATCH_ALL_REMOTE)); + JSONObject result = + executeQuery(String.format("search source=%s", TEST_INDEX_DOG_MATCH_ALL_REMOTE)); verifyColumn(result, columnName("dog_name"), columnName("holdersName"), columnName("age")); } @Test public void testCrossClusterSearchWithoutLocalFieldMappingShouldFail() throws IOException { - var exception = assertThrows(ResponseException.class, () -> - executeQuery(String.format("search source=%s", TEST_INDEX_ACCOUNT_REMOTE))); - assertTrue(exception.getMessage().contains("IndexNotFoundException") - && exception.getMessage().contains("400 Bad Request")); + var exception = + assertThrows( + ResponseException.class, + () -> executeQuery(String.format("search source=%s", TEST_INDEX_ACCOUNT_REMOTE))); + assertTrue( + exception.getMessage().contains("IndexNotFoundException") + && exception.getMessage().contains("400 Bad Request")); } @Test public void testCrossClusterSearchCommandWithLogicalExpression() throws IOException { - JSONObject result = executeQuery(String.format( - "search source=%s firstname='Hattie' | fields firstname", TEST_INDEX_BANK_REMOTE)); + JSONObject result = + executeQuery( + String.format( + "search source=%s firstname='Hattie' | fields firstname", TEST_INDEX_BANK_REMOTE)); verifyDataRows(result, rows("Hattie")); } @Test public void testCrossClusterSearchMultiClusters() throws IOException { - JSONObject result = executeQuery(String.format( - "search source=%s,%s firstname='Hattie' | fields firstname", TEST_INDEX_BANK_REMOTE, TEST_INDEX_BANK)); - verifyDataRows(result, - rows("Hattie"), - rows("Hattie")); + JSONObject result = + executeQuery( + String.format( + "search source=%s,%s firstname='Hattie' | fields firstname", + TEST_INDEX_BANK_REMOTE, TEST_INDEX_BANK)); + verifyDataRows(result, rows("Hattie"), rows("Hattie")); } @Test @@ -127,8 +134,7 @@ public void testCrossClusterDescribeAllFields() throws IOException { columnName("SCOPE_TABLE"), columnName("SOURCE_DATA_TYPE"), columnName("IS_AUTOINCREMENT"), - columnName("IS_GENERATEDCOLUMN") - ); + columnName("IS_GENERATEDCOLUMN")); } @Test @@ -159,7 +165,6 @@ public void testMatchAllCrossClusterDescribeAllFields() throws IOException { columnName("SCOPE_TABLE"), columnName("SOURCE_DATA_TYPE"), columnName("IS_AUTOINCREMENT"), - columnName("IS_GENERATEDCOLUMN") - ); + columnName("IS_GENERATEDCOLUMN")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java index 430ae9a7b2..a9eb18c2a1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE; @@ -22,29 +21,40 @@ public void init() throws IOException { @Test public void sanitizeTest() throws IOException { - String result = executeCsvQuery( - String.format(Locale.ROOT, "source=%s | fields firstname, lastname", 
TEST_INDEX_BANK_CSV_SANITIZE)); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "'+Amber JOHnny,Duke Willmington+%n" - + "'-Hattie,Bond-%n" - + "'=Nanette,Bates=%n" - + "'@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeCsvQuery( + String.format( + Locale.ROOT, + "source=%s | fields firstname, lastname", + TEST_INDEX_BANK_CSV_SANITIZE)); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "'+Amber JOHnny,Duke Willmington+%n" + + "'-Hattie,Bond-%n" + + "'=Nanette,Bates=%n" + + "'@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @Test public void escapeSanitizeTest() throws IOException { - String result = executeCsvQuery( - String.format(Locale.ROOT, "source=%s | fields firstname, lastname", TEST_INDEX_BANK_CSV_SANITIZE), false); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "+Amber JOHnny,Duke Willmington+%n" - + "-Hattie,Bond-%n" - + "=Nanette,Bates=%n" - + "@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeCsvQuery( + String.format( + Locale.ROOT, + "source=%s | fields firstname, lastname", + TEST_INDEX_BANK_CSV_SANITIZE), + false); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "+Amber JOHnny,Duke Willmington+%n" + + "-Hattie,Bond-%n" + + "=Nanette,Bates=%n" + + "@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java index 9911c35d8f..8b5a6d498e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.SQLIntegTestCase.Index.DATA_TYPE_NONNUMERIC; @@ -27,9 +26,9 @@ public void init() throws IOException { @Test public void test_numeric_data_types() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s", TEST_INDEX_DATATYPE_NUMERIC)); - verifySchema(result, + JSONObject result = executeQuery(String.format("source=%s", TEST_INDEX_DATATYPE_NUMERIC)); + verifySchema( + result, schema("long_number", "long"), schema("integer_number", "integer"), schema("short_number", "short"), @@ -42,9 +41,9 @@ public void test_numeric_data_types() throws IOException { @Test public void test_nonnumeric_data_types() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s", TEST_INDEX_DATATYPE_NONNUMERIC)); - verifySchema(result, + JSONObject result = executeQuery(String.format("source=%s", TEST_INDEX_DATATYPE_NONNUMERIC)); + verifySchema( + result, schema("boolean_value", "boolean"), schema("keyword_value", "string"), schema("text_value", "string"), @@ -58,15 +57,18 @@ public void test_nonnumeric_data_types() throws IOException { @Test public void test_long_integer_data_type() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval " - + " int1 = 2147483647," - + " int2 = -2147483648," - + " long1 = 2147483648," - + " long2 = -2147483649 | " - + "fields int1, int2, long1, long2 ", - TEST_INDEX_DATATYPE_NUMERIC)); - verifySchema(result, + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " int1 = 2147483647," + + " int2 = -2147483648," + + " long1 = 2147483648," + + " long2 = -2147483649 | " + + "fields int1, int2, long1, long2 ", + TEST_INDEX_DATATYPE_NUMERIC)); + verifySchema( + 
result, schema("int1", "integer"), schema("int2", "integer"), schema("long1", "long"), diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java index 4fb61ae2e9..6f6b5cc297 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java @@ -47,9 +47,10 @@ public void resetTimeZone() { private String name; private Boolean expectedResult; - public DateTimeComparisonIT(@Name("functionCall") String functionCall, - @Name("name") String name, - @Name("expectedResult") Boolean expectedResult) { + public DateTimeComparisonIT( + @Name("functionCall") String functionCall, + @Name("name") String name, + @Name("expectedResult") Boolean expectedResult) { this.functionCall = functionCall; this.name = name; this.expectedResult = expectedResult; @@ -57,548 +58,707 @@ public DateTimeComparisonIT(@Name("functionCall") String functionCall, @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), - $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), - $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), - $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), - $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), - $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), - $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), - $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), - $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), - $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), - $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), - $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), - $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), - $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), - $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), + $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), + $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), + $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), + $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), + $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), + $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), + $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), + $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), + $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), + $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), + $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), + $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), + $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), + $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimes() { - return Arrays.asList($$( - $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), - $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), - $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), - $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), - $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), - $("TIME('12:15:22') > TIME('04:12:42')", "gt1", 
true), - $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), - $("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), - $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), - $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), - $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), - $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), - $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), - $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), - $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), + $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), + $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), + $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), + $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), + $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), + $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), + $("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), + $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), + $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), + $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), + $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), + $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), + $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), + $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoDateTimes() { - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), - $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), - $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), - $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), - $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), - $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), - $("DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), + $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), + $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), + $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), + $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", 
"neq3", false), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), + $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), + $( + "DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimestamps() { - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), - $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), - $("TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", "neq3", false), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), - $("TIMESTAMP('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", "gte3", false), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), + $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", + "neq3", + false), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), + $("TIMESTAMP('1961-04-12 
09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), + $( + "TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", + "gte3", + false), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), - $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), + $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') = 
DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') = DATE('" + today + "')", 
"t_d_t", true), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + 
"TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') != 
DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false))); } 
@ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_t", + true), + $( + 
"TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - 
$("TIMESTAMP('" + today + " 20:50:42') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable 
compareGtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), - $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), + $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), - $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), + $("DATE('1984-12-15') > DATETIME('1961-04-12 
09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('3077-09-16 10:20:30') <= 
TIME('09:07:00')", "ts_t_f", false), - $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), + $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), - $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), - $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), + $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), + $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateWithOtherTypes() { 
- return Arrays.asList($$( - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", 
false), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", 
true), - $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), - $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), - $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), + $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), + $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') >= DATETIME('3077-04-12 
09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false))); } @Test public void testCompare() throws IOException { - var result = executeQuery(String.format("source=%s | eval `%s` = %s | fields `%s`", - TEST_INDEX_DATATYPE_NONNUMERIC, name, functionCall, name)); + var result = + executeQuery( + String.format( + "source=%s | eval `%s` = %s | fields `%s`", + TEST_INDEX_DATATYPE_NONNUMERIC, name, functionCall, name)); verifySchema(result, schema(name, null, "boolean")); verifyDataRows(result, rows(expectedResult)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java index b75b0ecaef..1df87a87b3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -53,826 +52,1151 @@ public void resetTimeZone() { @Test public void testAddDateWithDays() throws IOException { - var result = executeQuery(String.format("source=%s | eval " - + " f = adddate(date('2020-09-16'), 1)" - + " | fields f", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval " + " f = adddate(date('2020-09-16'), 1)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(timestamp('2020-09-16 17:30:00'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(timestamp('2020-09-16 17:30:00'), 1)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = 
executeQuery(String.format("source=%s | eval " - + " f = adddate(DATETIME('2020-09-16 07:40:00'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(DATETIME('2020-09-16 07:40:00'), 1)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 07:40:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(TIME('07:40:00'), 0)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = adddate(TIME('07:40:00'), 0)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows(LocalDate.now() + " 07:40:00")); } @Test public void testAddDateWithInterval() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(date('2020-09-16'), interval 1 day) " - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(date('2020-09-16'), interval 1 day) " + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(date('2020-09-16'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16 01:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = 
executeQuery(String.format("source=%s | eval " - + " f = adddate(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testConvertTZ() throws IOException { JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | 
fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void testDateAdd() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(date('2020-09-16'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(date('2020-09-16'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(date('2020-09-16'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); 
verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16 01:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = DATE_ADD(birthdate, INTERVAL 1 YEAR)" - + " | fields f", TEST_INDEX_BANK)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = DATE_ADD(birthdate, INTERVAL 1 YEAR)" + " | fields f", + TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "datetime")); - verifyDataRows(result, - rows("2018-10-23 00:00:00"), - rows("2018-11-20 00:00:00"), - rows("2019-06-23 00:00:00"), - rows("2019-11-13 23:33:20"), - rows("2019-06-27 00:00:00"), - rows("2019-08-19 00:00:00"), - rows("2019-08-11 00:00:00")); + verifyDataRows( + result, + rows("2018-10-23 00:00:00"), + rows("2018-11-20 00:00:00"), + rows("2019-06-23 00:00:00"), + rows("2019-11-13 23:33:20"), + rows("2019-06-27 00:00:00"), + rows("2019-08-19 00:00:00"), + rows("2019-08-11 00:00:00")); } @Test public void testDateTime() throws IOException { JSONObject result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')" + + " | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-24 21:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 06:30:00")); result = - 
executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 15:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2003-02-28 23:00:00-10:00', '+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2003-02-28 23:00:00-10:00', '+10:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2003-03-01 19:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 19:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2007-12-31 06:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval 
f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '-14:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '-14:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void testDateSub() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(date('2020-09-16'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(date('2020-09-16'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(date('2020-09-16'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 23:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - 
verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testDay() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); - result = executeQuery(String.format( - "source=%s | eval f = day('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = day('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); } @Test public void testDay_of_week() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day_of_week(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day_of_week(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); - result = executeQuery(String.format( - "source=%s | eval f = day_of_week('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = day_of_week('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test public void testDay_of_month() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day_of_month(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day_of_month(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); - result = executeQuery(String.format( - "source=%s | eval f = day_of_month('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = day_of_month('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); } @Test 
public void testDay_of_year() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day_of_year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day_of_year(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); - result = executeQuery(String.format( - "source=%s | eval f = day_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = day_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); } @Test public void testDayName() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("Wednesday")); - result = executeQuery(String.format( - "source=%s | eval f = dayname('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayname('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("Wednesday")); } @Test public void testDayOfMonth() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayofmonth(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayofmonth(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); - result = executeQuery(String.format( - "source=%s | eval f = dayofmonth('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayofmonth('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); } @Test public void testDayOfWeek() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayofweek(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayofweek(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); - result = executeQuery(String.format( - "source=%s | eval f = dayofweek('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayofweek('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test public void testDayOfYear() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayofyear(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayofyear(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, 
"integer")); verifySome(result.getJSONArray("datarows"), rows(260)); - result = executeQuery(String.format( - "source=%s | eval f = dayofyear('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayofyear('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); } @Test public void testFromDays() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = from_days(738049) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format("source=%s | eval f = from_days(738049) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16")); } @Test public void testHour() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = hour(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = hour(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = hour('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); } @Test public void testHour_of_day() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = hour_of_day(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour_of_day(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour_of_day('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - 
result = executeQuery(String.format( - "source=%s | eval f = hour_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); } @Test public void testMicrosecond() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.123456')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.123456')) |" + + " fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123456)); // Explicit timestamp value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.1234')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.1234')) | fields" + + " f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); - result = executeQuery(String.format( - "source=%s | eval f = microsecond(time('17:30:00.000010')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(time('17:30:00.000010')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(10)); // Explicit time value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond(time('17:30:00.1234')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(time('17:30:00.1234')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); - result = executeQuery(String.format( - "source=%s | eval f = microsecond('2020-09-16 17:30:00.123456') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('2020-09-16 17:30:00.123456') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123456)); // Implicit timestamp value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond('2020-09-16 17:30:00.1234') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('2020-09-16 17:30:00.1234') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); - result = executeQuery(String.format( - "source=%s | eval f = microsecond('17:30:00.000010') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('17:30:00.000010') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(10)); // Implicit time value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond('17:30:00.1234') | fields f", TEST_INDEX_DATE)); + result = + 
executeQuery( + String.format( + "source=%s | eval f = microsecond('17:30:00.1234') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); } @Test public void testMinute() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = minute(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = minute(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = minute('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); } @Test public void testMinute_of_hour() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour(time('17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); } @Test public void testMinute_of_day() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = minute_of_day(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + 
JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_day(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day(time('17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_day('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); } @Test public void testMonth() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = month(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = month(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); - result = executeQuery(String.format( - "source=%s | eval f = month('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = month('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); } @Test public void testMonth_of_year() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = month_of_year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = month_of_year(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); - result = executeQuery(String.format( - "source=%s | eval f = month_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = month_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); } @Test public void testMonthName() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = monthname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = monthname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("September")); - result = executeQuery(String.format( - "source=%s | eval f = monthname('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + 
String.format( + "source=%s | eval f = monthname('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("September")); } @Test public void testQuarter() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = quarter(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = quarter(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(3)); - result = executeQuery(String.format( - "source=%s | eval f = quarter('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = quarter('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(3)); } @Test public void testSecond() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = second(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = second(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = second('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); } @Test public void testSecond_of_minute() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = second_of_minute(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = second_of_minute(timestamp('2020-09-16 17:30:00')) | fields" + + " f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second_of_minute(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second_of_minute(time('17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second_of_minute('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + 
"source=%s | eval f = second_of_minute('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second_of_minute('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second_of_minute('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); } @Test public void testSubDateDays() throws IOException { - var result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), 1)" - + " | fields f", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval " + " f = subdate(date('2020-09-16'), 1)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(timestamp('2020-09-16 17:30:00'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(timestamp('2020-09-16 17:30:00'), 1)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = subdate(date('2020-09-16'), 1)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(TIME('07:40:00'), 0)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = subdate(TIME('07:40:00'), 0)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows(LocalDate.now() + " 07:40:00")); } @Test public void testSubDateInterval() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), interval 1 day) " - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(date('2020-09-16'), 
interval 1 day) " + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 23:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testTimeToSec() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = time_to_sec(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = time_to_sec(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); verifySome(result.getJSONArray("datarows"), rows(63000)); - result = executeQuery(String.format( - "source=%s | eval f = time_to_sec('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = time_to_sec('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); verifySome(result.getJSONArray("datarows"), rows(63000)); } @Test public void testToDays() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = to_days(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = to_days(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); verifySome(result.getJSONArray("datarows"), rows(738049)); - result = executeQuery(String.format( - "source=%s | eval f = to_days('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + 
"source=%s | eval f = to_days('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); verifySome(result.getJSONArray("datarows"), rows(738049)); } private void week(String date, int mode, int expectedResult) throws IOException { - JSONObject result = executeQuery(StringUtils.format( - "source=%s | eval f = week(date('%s'), %d) | fields f", TEST_INDEX_DATE, date, mode)); + JSONObject result = + executeQuery( + StringUtils.format( + "source=%s | eval f = week(date('%s'), %d) | fields f", + TEST_INDEX_DATE, date, mode)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(expectedResult)); } @Test public void testWeek() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = week(date('2008-02-20')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = week(date('2008-02-20')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(7)); @@ -885,35 +1209,46 @@ public void testWeek() throws IOException { @Test public void testWeek_of_year() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = week_of_year(date('2008-02-20')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = week_of_year(date('2008-02-20')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(7)); } @Test public void testYear() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(2020)); - result = executeQuery(String.format( - "source=%s | eval f = year('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = year('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(2020)); } - void verifyDateFormat(String date, String type, String format, String formatted) throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = date_format(%s('%s'), '%s') | fields f", - TEST_INDEX_DATE, type, date, format)); + void verifyDateFormat(String date, String type, String format, String formatted) + throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = date_format(%s('%s'), '%s') | fields f", + TEST_INDEX_DATE, type, date, format)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows(formatted)); - result = executeQuery(String.format( - "source=%s | eval f = date_format('%s', '%s') | fields f", - TEST_INDEX_DATE, date, format)); + result = + executeQuery( + String.format( + "source=%s | eval f = date_format('%s', '%s') | fields f", + TEST_INDEX_DATE, date, format)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows(formatted)); } @@ -921,10 +1256,11 @@ void verifyDateFormat(String date, String type, String format, String formatted) @Test public void 
testDateFormat() throws IOException { String timestamp = "1998-01-31 13:14:15.012345"; - String timestampFormat = "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " - + "%m %p %r %S %s %T %% %P"; - String timestampFormatted = "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " - + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; + String timestampFormat = + "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " + "%m %p %r %S %s %T %% %P"; + String timestampFormatted = + "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " + + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; verifyDateFormat(timestamp, "timestamp", timestampFormat, timestampFormatted); String date = "1998-01-31"; @@ -948,76 +1284,119 @@ public void testDateFormatISO8601() throws IOException { @Test public void testMakeTime() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = MAKETIME(20, 30, 40), f2 = MAKETIME(20.2, 49.5, 42.100502) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = MAKETIME(20, 30, 40), f2 = MAKETIME(20.2, 49.5, 42.100502) |" + + " fields f1, f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "time"), schema("f2", null, "time")); verifySome(result.getJSONArray("datarows"), rows("20:30:40", "20:50:42.100502")); } @Test public void testMakeDate() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = MAKEDATE(1945, 5.9), f2 = MAKEDATE(1984, 1984) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = MAKEDATE(1945, 5.9), f2 = MAKEDATE(1984, 1984) | fields f1," + + " f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "date"), schema("f2", null, "date")); verifySome(result.getJSONArray("datarows"), rows("1945-01-06", "1989-06-06")); } @Test public void testAddTime() throws IOException { - var result = executeQuery(String.format("source=%s | eval" - + " `'2008-12-12' + 0` = ADDTIME(DATE('2008-12-12'), DATE('2008-11-15'))," - + " `'23:59:59' + 0` = ADDTIME(TIME('23:59:59'), DATE('2004-01-01'))," - + " `'2004-01-01' + '23:59:59'` = ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))," - + " `'10:20:30' + '00:05:42'` = ADDTIME(TIME('10:20:30'), TIME('00:05:42'))," - + " `'15:42:13' + '09:07:00'` = ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" - + " | fields `'2008-12-12' + 0`, `'23:59:59' + 0`, `'2004-01-01' + '23:59:59'`, `'10:20:30' + '00:05:42'`, `'15:42:13' + '09:07:00'`", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval `'2008-12-12' + 0` = ADDTIME(DATE('2008-12-12')," + + " DATE('2008-11-15')), `'23:59:59' + 0` = ADDTIME(TIME('23:59:59')," + + " DATE('2004-01-01')), `'2004-01-01' + '23:59:59'` =" + + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')), `'10:20:30' + '00:05:42'` =" + + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')), `'15:42:13' + '09:07:00'` =" + + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" + + " | fields `'2008-12-12' + 0`, `'23:59:59' + 0`, `'2004-01-01' + '23:59:59'`," + + " `'10:20:30' + '00:05:42'`, `'15:42:13' + '09:07:00'`", + TEST_INDEX_DATE)); + verifySchema( + result, schema("'2008-12-12' + 0", null, "datetime"), schema("'23:59:59' + 0", null, "time"), schema("'2004-01-01' + '23:59:59'", null, "datetime"), schema("'10:20:30' + '00:05:42'", null, "time"), schema("'15:42:13' + '09:07:00'", null, "datetime")); - verifySome(result.getJSONArray("datarows"), 
rows("2008-12-12 00:00:00", "23:59:59", "2004-01-01 23:59:59", "10:26:12", "2000-01-01 00:49:13")); + verifySome( + result.getJSONArray("datarows"), + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2004-01-01 23:59:59", + "10:26:12", + "2000-01-01 00:49:13")); } @Test public void testSubTime() throws IOException { - var result = executeQuery(String.format("source=%s | eval" - + " `'2008-12-12' - 0` = SUBTIME(DATE('2008-12-12'), DATE('2008-11-15'))," - + " `'23:59:59' - 0` = SUBTIME(TIME('23:59:59'), DATE('2004-01-01'))," - + " `'2004-01-01' - '23:59:59'` = SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))," - + " `'10:20:30' - '00:05:42'` = SUBTIME(TIME('10:20:30'), TIME('00:05:42'))," - + " `'15:42:13' - '09:07:00'` = SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" - + " | fields `'2008-12-12' - 0`, `'23:59:59' - 0`, `'2004-01-01' - '23:59:59'`, `'10:20:30' - '00:05:42'`, `'15:42:13' - '09:07:00'`", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval `'2008-12-12' - 0` = SUBTIME(DATE('2008-12-12')," + + " DATE('2008-11-15')), `'23:59:59' - 0` = SUBTIME(TIME('23:59:59')," + + " DATE('2004-01-01')), `'2004-01-01' - '23:59:59'` =" + + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')), `'10:20:30' - '00:05:42'` =" + + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')), `'15:42:13' - '09:07:00'` =" + + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" + + " | fields `'2008-12-12' - 0`, `'23:59:59' - 0`, `'2004-01-01' - '23:59:59'`," + + " `'10:20:30' - '00:05:42'`, `'15:42:13' - '09:07:00'`", + TEST_INDEX_DATE)); + verifySchema( + result, schema("'2008-12-12' - 0", null, "datetime"), schema("'23:59:59' - 0", null, "time"), schema("'2004-01-01' - '23:59:59'", null, "datetime"), schema("'10:20:30' - '00:05:42'", null, "time"), schema("'15:42:13' - '09:07:00'", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2008-12-12 00:00:00", "23:59:59", "2003-12-31 00:00:01", "10:14:48", "1999-12-31 06:35:13")); + verifySome( + result.getJSONArray("datarows"), + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2003-12-31 00:00:01", + "10:14:48", + "1999-12-31 06:35:13")); } @Test public void testFromUnixTime() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = FROM_UNIXTIME(200300400), f2 = FROM_UNIXTIME(12224.12), " - + "f3 = FROM_UNIXTIME(1662601316, '%%T') | fields f1, f2, f3", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval f1 = FROM_UNIXTIME(200300400), f2 = FROM_UNIXTIME(12224.12), " + + "f3 = FROM_UNIXTIME(1662601316, '%%T') | fields f1, f2, f3", + TEST_INDEX_DATE)); + verifySchema( + result, schema("f1", null, "datetime"), schema("f2", null, "datetime"), schema("f3", null, "string")); - verifySome(result.getJSONArray("datarows"), + verifySome( + result.getJSONArray("datarows"), rows("1976-05-07 07:00:00", "1970-01-01 03:23:44.12", "01:41:56")); } @Test public void testUnixTimeStamp() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = UNIX_TIMESTAMP(MAKEDATE(1984, 1984)), " - + "f2 = UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')), " - + "f3 = UNIX_TIMESTAMP(20771122143845) | fields f1, f2, f3", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval f1 = UNIX_TIMESTAMP(MAKEDATE(1984, 1984)), " + + "f2 = UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')), " + + "f3 = 
UNIX_TIMESTAMP(20771122143845) | fields f1, f2, f3", + TEST_INDEX_DATE)); + verifySchema( + result, schema("f1", null, "double"), schema("f2", null, "double"), schema("f3", null, "double")); @@ -1026,28 +1405,43 @@ public void testUnixTimeStamp() throws IOException { @Test public void testPeriodAdd() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = PERIOD_ADD(200801, 2), f2 = PERIOD_ADD(200801, -12) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = PERIOD_ADD(200801, 2), f2 = PERIOD_ADD(200801, -12) | fields" + + " f1, f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "integer"), schema("f2", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(200803, 200701)); } @Test public void testPeriodDiff() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = PERIOD_DIFF(200802, 200703), f2 = PERIOD_DIFF(200802, 201003) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = PERIOD_DIFF(200802, 200703), f2 = PERIOD_DIFF(200802," + + " 201003) | fields f1, f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "integer"), schema("f2", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(11, -25)); } public void testDateDiff() throws IOException { - var result = executeQuery(String.format("source=%s | eval" - + " `'2000-01-02' - '2000-01-01'` = DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59'))," - + " `'2001-02-01' - '2004-01-01'` = DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00'))," - + " `'2004-01-01' - '2002-02-01'` = DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))," - + " `today - today` = DATEDIFF(TIME('23:59:59'), TIME('00:00:00'))" - + " | fields `'2000-01-02' - '2000-01-01'`, `'2001-02-01' - '2004-01-01'`, `'2004-01-01' - '2002-02-01'`, `today - today`", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval `'2000-01-02' - '2000-01-01'` = DATEDIFF(TIMESTAMP('2000-01-02" + + " 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')), `'2001-02-01' -" + + " '2004-01-01'` = DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01" + + " 00:00:00')), `'2004-01-01' - '2002-02-01'` = DATEDIFF(TIMESTAMP('2004-01-01" + + " 00:00:00'), DATETIME('2002-02-01 14:25:30')), `today - today` =" + + " DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) | fields `'2000-01-02' -" + + " '2000-01-01'`, `'2001-02-01' - '2004-01-01'`, `'2004-01-01' -" + + " '2002-02-01'`, `today - today`", + TEST_INDEX_DATE)); + verifySchema( + result, schema("'2000-01-02' - '2000-01-01'", null, "long"), schema("'2001-02-01' - '2004-01-01'", null, "long"), schema("'2004-01-01' - '2002-02-01'", null, "long"), @@ -1057,90 +1451,124 @@ public void testDateDiff() throws IOException { @Test public void testTimeDiff() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f = TIMEDIFF('23:59:59', '13:00:00') | fields f", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f = TIMEDIFF('23:59:59', '13:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "time")); verifySome(result.getJSONArray("datarows"), rows("10:59:59")); } @Test - public void testGetFormat() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = date_format('2003-10-03', get_format(DATE,'USA')) | fields f", 
TEST_INDEX_DATE)); + public void testGetFormat() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = date_format('2003-10-03', get_format(DATE,'USA')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("10.03.2003")); } @Test - public void testLastDay() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = last_day('2003-10-03') | fields f", TEST_INDEX_DATE)); + public void testLastDay() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = last_day('2003-10-03') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2003-10-31")); } @Test - public void testSecToTime() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = sec_to_time(123456) | fields f", TEST_INDEX_DATE)); + public void testSecToTime() throws IOException { + var result = + executeQuery( + String.format("source=%s | eval f = sec_to_time(123456) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "time")); verifySome(result.getJSONArray("datarows"), rows("10:17:36")); } @Test - public void testYearWeek() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f1 = yearweek('2003-10-03') | eval f2 = yearweek('2003-10-03', 3) | fields f1, f2", TEST_INDEX_DATE)); - verifySchema(result, - schema("f1", null, "integer"), - schema("f2", null, "integer")); + public void testYearWeek() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f1 = yearweek('2003-10-03') | eval f2 = yearweek('2003-10-03', 3)" + + " | fields f1, f2", + TEST_INDEX_DATE)); + verifySchema(result, schema("f1", null, "integer"), schema("f2", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(200339, 200340)); } @Test - public void testWeekDay() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = weekday('2003-10-03') | fields f", TEST_INDEX_DATE)); + public void testWeekDay() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = weekday('2003-10-03') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test - public void testToSeconds() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f1 = to_seconds(date('2008-10-07')) | " + - "eval f2 = to_seconds('2020-09-16 07:40:00') | " + - "eval f3 = to_seconds(DATETIME('2020-09-16 07:40:00')) | fields f1, f2, f3", TEST_INDEX_DATE)); - verifySchema(result, - schema("f1", null, "long"), - schema("f2", null, "long"), - schema("f3", null, "long")); + public void testToSeconds() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f1 = to_seconds(date('2008-10-07')) | " + + "eval f2 = to_seconds('2020-09-16 07:40:00') | " + + "eval f3 = to_seconds(DATETIME('2020-09-16 07:40:00')) | fields f1, f2, f3", + TEST_INDEX_DATE)); + verifySchema( + result, schema("f1", null, "long"), schema("f2", null, "long"), schema("f3", null, "long")); verifySome(result.getJSONArray("datarows"), rows(63390556800L, 63767461200L, 63767461200L)); } @Test - public void testStrToDate() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = str_to_date('01,5,2013', '%s') | fields f", TEST_INDEX_DATE, "%d,%m,%Y")); + 
public void testStrToDate() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = str_to_date('01,5,2013', '%s') | fields f", + TEST_INDEX_DATE, "%d,%m,%Y")); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2013-05-01 00:00:00")); } @Test - public void testTimeStampAdd() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = timestampadd(YEAR, 15, '2001-03-06 00:00:00') | fields f", TEST_INDEX_DATE)); + public void testTimeStampAdd() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = timestampadd(YEAR, 15, '2001-03-06 00:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2016-03-06 00:00:00")); } @Test - public void testTimestampDiff() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = timestampdiff(YEAR, '1997-01-01 00:00:00', '2001-03-06 00:00:00') | fields f", TEST_INDEX_DATE)); + public void testTimestampDiff() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = timestampdiff(YEAR, '1997-01-01 00:00:00', '2001-03-06" + + " 00:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test - public void testExtract() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f1 = extract(YEAR FROM '1997-01-01 00:00:00') | eval f2 = extract(MINUTE FROM time('10:17:36')) | fields f1, f2", TEST_INDEX_DATE)); - verifySchema(result, - schema("f1", null, "long"), - schema("f2", null, "long")); + public void testExtract() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f1 = extract(YEAR FROM '1997-01-01 00:00:00') | eval f2 =" + + " extract(MINUTE FROM time('10:17:36')) | fields f1, f2", + TEST_INDEX_DATE)); + verifySchema(result, schema("f1", null, "long"), schema("f2", null, "long")); verifySome(result.getJSONArray("datarows"), rows(1997L, 17L)); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java index 3f24b619f5..dd86470a39 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java @@ -17,131 +17,163 @@ public class DateTimeImplementationIT extends PPLIntegTestCase { - @Override public void init() throws IOException { loadIndex(Index.DATE); } - @Test public void inRangeZeroToStringTZ() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-24 21:30:00")); } @Test public void inRangeZeroToPositive() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = 
DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 06:30:00")); } @Test public void inRangeNegativeToPositive() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 15:30:00")); } @Test public void inRangeTwentyHourOffset() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); } - @Test public void inRangeYearChange() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2007-12-31 06:00:00")); } @Test public void inRangeZeroToMax() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 19:30:00")); } @Test public void inRangeNoToTZ() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); } @Test public void inRangeNoTZ() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); } @Test public void nullField3Over() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", + 
TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullField2Under() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullTField3Over() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00', '+15:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '+15:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2021-02-30 10:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2021-02-30 10:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2021-04-31 10:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2021-04-31 10:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueMonth() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2021-13-03 10:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2021-13-03 10:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java index bd4fadb57f..7a6cf16bb4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git 
a/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java index 23bea69a52..aee32e08d1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DOG; @@ -52,19 +51,17 @@ public void testDescribeAllFields() throws IOException { columnName("SCOPE_TABLE"), columnName("SOURCE_DATA_TYPE"), columnName("IS_AUTOINCREMENT"), - columnName("IS_GENERATEDCOLUMN") - ); + columnName("IS_GENERATEDCOLUMN")); } @Test public void testDescribeFilterFields() throws IOException { - JSONObject result = executeQuery(String.format("describe %s | fields TABLE_NAME, COLUMN_NAME, TYPE_NAME", TEST_INDEX_DOG)); + JSONObject result = + executeQuery( + String.format( + "describe %s | fields TABLE_NAME, COLUMN_NAME, TYPE_NAME", TEST_INDEX_DOG)); verifyColumn( - result, - columnName("TABLE_NAME"), - columnName("COLUMN_NAME"), - columnName("TYPE_NAME") - ); + result, columnName("TABLE_NAME"), columnName("COLUMN_NAME"), columnName("TYPE_NAME")); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java index 1a785e9074..fce975ef92 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.util.MatcherUtils.assertJsonEquals; @@ -35,8 +34,7 @@ public void testExplain() throws Exception { + "| fields - city " + "| eval age2 = avg_age + 2 " + "| dedup age2 " - + "| fields age2") - ); + + "| fields age2")); } @Test @@ -50,8 +48,7 @@ public void testFilterPushDownExplain() throws Exception { + "| where age > 30 " + "| where age < 40 " + "| where balance > 10000 " - + "| fields age") - ); + + "| fields age")); } @Test @@ -63,8 +60,7 @@ public void testFilterAndAggPushDownExplain() throws Exception { explainQueryToString( "source=opensearch-sql_test_index_account" + "| where age > 30 " - + "| stats avg(age) AS avg_age by state, city") - ); + + "| stats avg(age) AS avg_age by state, city")); } @Test @@ -77,8 +73,7 @@ public void testSortPushDownExplain() throws Exception { "source=opensearch-sql_test_index_account" + "| sort age " + "| where age > 30" - + "| fields age") - ); + + "| fields age")); } String loadFromFile(String filename) throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java index 4eb99e8b04..e8a287c80e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -43,7 +42,9 @@ public void testFieldsWithMultiFields() throws IOException { verifyColumn(result, columnName("firstname"), columnName("lastname")); } - @Ignore("Cannot resolve wildcard yet. Enable once https://github.com/opensearch-project/sql/issues/787 is resolved.") + @Ignore( + "Cannot resolve wildcard yet. 
Enable once" + + " https://github.com/opensearch-project/sql/issues/787 is resolved.") @Test public void testFieldsWildCard() throws IOException { JSONObject result = @@ -57,14 +58,14 @@ public void testSelectDateTypeField() throws IOException { executeQuery(String.format("source=%s | fields birthdate", TEST_INDEX_BANK)); verifySchema(result, schema("birthdate", null, "timestamp")); - verifyDataRows(result, + verifyDataRows( + result, rows("2017-10-23 00:00:00"), rows("2017-11-20 00:00:00"), rows("2018-06-23 00:00:00"), rows("2018-11-13 23:33:20"), rows("2018-06-27 00:00:00"), rows("2018-08-19 00:00:00"), - rows("2018-08-11 00:00:00") - ); + rows("2018-08-11 00:00:00")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java index 48c489ce10..8a96620fe0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -39,7 +38,8 @@ public void init() throws IOException { public void testHead() throws IOException { JSONObject result = executeQuery(String.format("source=%s | fields firstname, age | head", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -55,11 +55,9 @@ public void testHead() throws IOException { @Test public void testHeadWithNumber() throws IOException { JSONObject result = - executeQuery(String.format("source=%s | fields firstname, age | head 3", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, - rows("Amber", 32), - rows("Hattie", 36), - rows("Nanette", 28)); + executeQuery( + String.format("source=%s | fields firstname, age | head 3", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28)); } @Ignore("Fix https://github.com/opensearch-project/sql/issues/703#issuecomment-1211422130") @@ -67,9 +65,10 @@ public void testHeadWithNumber() throws IOException { public void testHeadWithNumberLargerThanQuerySizeLimit() throws IOException { setQuerySizeLimit(5); JSONObject result = - executeQuery(String.format( - "source=%s | fields firstname, age | head 10", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + executeQuery( + String.format("source=%s | fields firstname, age | head 10", TEST_INDEX_ACCOUNT)); + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -86,9 +85,10 @@ public void testHeadWithNumberLargerThanQuerySizeLimit() throws IOException { public void testHeadWithNumberLargerThanMaxResultWindow() throws IOException { setMaxResultWindow(TEST_INDEX_ACCOUNT, 10); JSONObject result = - executeQuery(String.format( - "source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + executeQuery( + String.format("source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -112,9 +112,10 @@ public void testHeadWithLargeNumber() throws IOException { setQuerySizeLimit(5); setMaxResultWindow(TEST_INDEX_ACCOUNT, 10); JSONObject result = - executeQuery(String.format( - "source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + executeQuery( + String.format("source=%s | fields firstname, age | head 15", 
TEST_INDEX_ACCOUNT)); + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -135,10 +136,8 @@ public void testHeadWithLargeNumber() throws IOException { @Test public void testHeadWithNumberAndFrom() throws IOException { JSONObject result = - executeQuery(String.format("source=%s | fields firstname, age | head 3 from 4", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, - rows("Elinor", 36), - rows("Virginia", 39), - rows("Dillard", 34)); + executeQuery( + String.format("source=%s | fields firstname, age | head 3 from 4", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("Elinor", 36), rows("Virginia", 39), rows("Dillard", 34)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java index 37909e4726..cf7cfcdb39 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java @@ -28,10 +28,10 @@ public class InformationSchemaCommandIT extends PPLIntegTestCase { /** - * Integ tests are dependent on self generated metrics in prometheus instance. - * When running individual integ tests there - * is no time for generation of metrics in the test prometheus instance. - * This method gives prometheus time to generate metrics on itself. + * Integ tests are dependent on self generated metrics in prometheus instance. When running + * individual integ tests there is no time for generation of metrics in the test prometheus + * instance. This method gives prometheus time to generate metrics on itself. + * * @throws InterruptedException */ @BeforeClass @@ -42,8 +42,11 @@ protected static void metricGenerationWait() throws InterruptedException { @Override protected void init() throws InterruptedException, IOException { DataSourceMetadata createDSM = - new DataSourceMetadata("my_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "http://localhost:9090")); + new DataSourceMetadata( + "my_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "http://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); @@ -59,8 +62,9 @@ protected void deleteDataSourceMetadata() throws IOException { @Test public void testSearchTablesFromPrometheusCatalog() throws IOException { JSONObject result = - executeQuery("source=my_prometheus.information_schema.tables " - + "| where LIKE(TABLE_NAME, '%http%')"); + executeQuery( + "source=my_prometheus.information_schema.tables " + + "| where LIKE(TABLE_NAME, '%http%')"); this.logger.error(result.toString()); verifyColumn( result, @@ -69,24 +73,53 @@ public void testSearchTablesFromPrometheusCatalog() throws IOException { columnName("TABLE_NAME"), columnName("TABLE_TYPE"), columnName("UNIT"), - columnName("REMARKS") - ); - verifyDataRows(result, - rows("my_prometheus", "default", "promhttp_metric_handler_requests_in_flight", - "gauge", "", "Current number of scrapes being served."), - rows("my_prometheus", "default", "prometheus_sd_http_failures_total", - "counter", "", "Number of HTTP service discovery refresh failures."), - rows("my_prometheus", "default", "promhttp_metric_handler_requests_total", - "counter", "", "Total number of scrapes by HTTP status code."), - 
rows("my_prometheus", "default", "prometheus_http_request_duration_seconds", - "histogram", "", "Histogram of latencies for HTTP requests."), - rows("my_prometheus", "default", "prometheus_http_requests_total", - "counter", "", "Counter of HTTP requests."), - rows("my_prometheus", "default", "prometheus_http_response_size_bytes", - "histogram", "", "Histogram of response size for HTTP requests.")); + columnName("REMARKS")); + verifyDataRows( + result, + rows( + "my_prometheus", + "default", + "promhttp_metric_handler_requests_in_flight", + "gauge", + "", + "Current number of scrapes being served."), + rows( + "my_prometheus", + "default", + "prometheus_sd_http_failures_total", + "counter", + "", + "Number of HTTP service discovery refresh failures."), + rows( + "my_prometheus", + "default", + "promhttp_metric_handler_requests_total", + "counter", + "", + "Total number of scrapes by HTTP status code."), + rows( + "my_prometheus", + "default", + "prometheus_http_request_duration_seconds", + "histogram", + "", + "Histogram of latencies for HTTP requests."), + rows( + "my_prometheus", + "default", + "prometheus_http_requests_total", + "counter", + "", + "Counter of HTTP requests."), + rows( + "my_prometheus", + "default", + "prometheus_http_response_size_bytes", + "histogram", + "", + "Histogram of response size for HTTP requests.")); } - @Test public void testTablesFromPrometheusCatalog() throws IOException { JSONObject result = @@ -101,15 +134,18 @@ public void testTablesFromPrometheusCatalog() throws IOException { columnName("TABLE_NAME"), columnName("TABLE_TYPE"), columnName("UNIT"), - columnName("REMARKS") - ); - verifyDataRows(result, - rows("my_prometheus", - "default", "prometheus_http_requests_total", - "counter", "", "Counter of HTTP requests.")); + columnName("REMARKS")); + verifyDataRows( + result, + rows( + "my_prometheus", + "default", + "prometheus_http_requests_total", + "counter", + "", + "Counter of HTTP requests.")); } - // Moved this IT from DescribeCommandIT to segregate Datasource Integ Tests. 
@Test public void testDescribeCommandWithPrometheusCatalog() throws IOException { @@ -120,16 +156,19 @@ public void testDescribeCommandWithPrometheusCatalog() throws IOException { columnName("TABLE_SCHEMA"), columnName("TABLE_NAME"), columnName("COLUMN_NAME"), - columnName("DATA_TYPE") - ); - verifyDataRows(result, + columnName("DATA_TYPE")); + verifyDataRows( + result, rows("my_prometheus", "default", "prometheus_http_requests_total", "handler", "keyword"), rows("my_prometheus", "default", "prometheus_http_requests_total", "code", "keyword"), rows("my_prometheus", "default", "prometheus_http_requests_total", "instance", "keyword"), rows("my_prometheus", "default", "prometheus_http_requests_total", "@value", "double"), - rows("my_prometheus", "default", "prometheus_http_requests_total", "@timestamp", + rows( + "my_prometheus", + "default", + "prometheus_http_requests_total", + "@timestamp", "timestamp"), rows("my_prometheus", "default", "prometheus_http_requests_total", "job", "keyword")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java index 4bf9a37a9f..c14b9baa35 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java @@ -16,9 +16,7 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.Response; -/** - * For backward compatibility, check if legacy API endpoints are accessible. - */ +/** For backward compatibility, check if legacy API endpoints are accessible. */ public class LegacyAPICompatibilityIT extends PPLIntegTestCase { @Override @@ -51,22 +49,20 @@ public void stats() throws IOException { @Test public void legacySettingNewEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"opendistro.ppl.query.memory_limit\": \"80%\"" - + " }" - + "}"; + String requestBody = + "{" + + " \"persistent\": {" + + " \"opendistro.ppl.query.memory_limit\": \"80%\"" + + " }" + + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void newSettingNewEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"plugins.query.size_limit\": \"100\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"plugins.query.size_limit\": \"100\"" + " }" + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @@ -83,5 +79,4 @@ private RequestOptions.Builder buildJsonOption() { restOptionsBuilder.addHeader("Content-Type", "application/json"); return restOptionsBuilder; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java index 67ad553689..75dd6aa268 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; @@ -23,9 +22,13 @@ public void init() throws IOException { @Test public void test_like_with_percent() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(KeywordBody, 'test 
wildcard%') | fields KeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(KeywordBody, 'test wildcard%') | fields KeywordBody"; JSONObject result = executeQuery(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard"), rows("test wildcard in the end of the text%"), rows("test wildcard in % the middle of the text"), @@ -37,51 +40,66 @@ public void test_like_with_percent() throws IOException { @Test public void test_like_with_escaped_percent() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(KeywordBody, '\\\\%test wildcard%') | fields KeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(KeywordBody, '\\\\%test wildcard%') | fields KeywordBody"; JSONObject result = executeQuery(query); - verifyDataRows(result, - rows("%test wildcard in the beginning of the text")); + verifyDataRows(result, rows("%test wildcard in the beginning of the text")); } @Test public void test_like_in_where_with_escaped_underscore() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(KeywordBody, '\\\\_test wildcard%') | fields KeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(KeywordBody, '\\\\_test wildcard%') | fields KeywordBody"; JSONObject result = executeQuery(query); - verifyDataRows(result, - rows("_test wildcard in the beginning of the text")); + verifyDataRows(result, rows("_test wildcard in the beginning of the text")); } @Test public void test_like_on_text_field_with_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test*') | fields TextBody"; + String query = + "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test*') | fields TextBody"; JSONObject result = executeQuery(query); assertEquals(9, result.getInt("total")); } @Test public void test_like_on_text_keyword_field_with_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextKeywordBody, 'test*') | fields TextKeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(TextKeywordBody, 'test*') | fields TextKeywordBody"; JSONObject result = executeQuery(query); assertEquals(8, result.getInt("total")); } @Test public void test_like_on_text_keyword_field_with_greater_than_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextKeywordBody, 'test wild*') | fields TextKeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(TextKeywordBody, 'test wild*') | fields TextKeywordBody"; JSONObject result = executeQuery(query); assertEquals(7, result.getInt("total")); } @Test public void test_like_on_text_field_with_greater_than_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test wild*') | fields TextBody"; + String query = + "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test wild*') | fields TextBody"; JSONObject result = executeQuery(query); assertEquals(0, result.getInt("total")); } @Test public void test_convert_field_text_to_keyword() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextKeywordBody, '*') | fields TextKeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(TextKeywordBody, '*') | fields TextKeywordBody"; String result = explainQueryToString(query); 
assertTrue(result.contains("TextKeywordBody.keyword")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java index 42ba8bea53..67e6fac04d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java @@ -28,9 +28,7 @@ public void valid_query_match_test() throws IOException { "source=%s | where match_bool_prefix(phrase, 'qui') | fields phrase", TEST_INDEX_PHRASE)); - verifyDataRows(result, - rows("quick fox"), - rows("quick fox here")); + verifyDataRows(result, rows("quick fox"), rows("quick fox here")); } @Test @@ -38,12 +36,11 @@ public void optional_parameter_match_test() throws IOException { JSONObject result = executeQuery( String.format( - "source=%s | where match_bool_prefix(phrase, '2 tes', minimum_should_match=1, fuzziness=2) | fields phrase", + "source=%s | where match_bool_prefix(phrase, '2 tes', minimum_should_match=1," + + " fuzziness=2) | fields phrase", TEST_INDEX_PHRASE)); - verifyDataRows(result, - rows("my test"), - rows("my test 2")); + verifyDataRows(result, rows("my test"), rows("my test 2")); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java index 808be2334d..908f7a621c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java index 780113de52..5efc2108b9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_PHRASE; @@ -24,18 +23,20 @@ public void init() throws IOException { @Test public void test_match_phrase_function() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where match_phrase(phrase, 'quick fox') | fields phrase", TEST_INDEX_PHRASE)); + executeQuery( + String.format( + "source=%s | where match_phrase(phrase, 'quick fox') | fields phrase", + TEST_INDEX_PHRASE)); verifyDataRows(result, rows("quick fox"), rows("quick fox here")); } @Test public void test_match_phrase_with_slop() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where match_phrase(phrase, 'brown fox', slop = 2) | fields phrase", TEST_INDEX_PHRASE)); + executeQuery( + String.format( + "source=%s | where match_phrase(phrase, 'brown fox', slop = 2) | fields phrase", + TEST_INDEX_PHRASE)); verifyDataRows(result, rows("brown fox"), rows("fox brown")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java index 0f827692a5..91ce1bbd10 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java @@ -24,46 +24,48 @@ public void init() throws IOException { public void 
required_parameters() throws IOException { String query = "source = %s | WHERE match_phrase_prefix(Title, 'champagne be') | fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, + verifyDataRows( + result, rows("Can old flat champagne be used for vinegar?"), rows("Elder flower champagne best to use natural yeast or add a wine yeast?")); } - @Test public void all_optional_parameters() throws IOException { // The values for optional parameters are valid but arbitrary. - String query = "source = %s " + - "| WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, " + - "zero_terms_query='ALL', max_expansions = 2, analyzer=standard, slop=0) " + - "| fields Title"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, " + + "zero_terms_query='ALL', max_expansions = 2, analyzer=standard, slop=0) " + + "| fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Can old flat champagne be used for vinegar?")); } - @Test public void max_expansions_is_3() throws IOException { // max_expansions applies to the last term in the query -- 'bottl' // It tells OpenSearch to consider only the first 3 terms that start with 'bottl' // In this dataset these are 'bottle-conditioning', 'bottling', 'bottles'. - String query = "source = %s " + - "| WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3) | fields Tags"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3) | fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, rows("brewing draught bottling"), - rows("draught bottles")); + verifyDataRows(result, rows("brewing draught bottling"), rows("draught bottles")); } @Test public void analyzer_english() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // This results in an empty query. - String query = "source = %s " + - "| WHERE match_phrase_prefix(Title, 'in to', analyzer=english)" + - "| fields Title"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Title, 'in to', analyzer=english)" + + "| fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - assertTrue("Expect English analyzer to filter out common words 'in' and 'to'", + assertTrue( + "Expect English analyzer to filter out common words 'in' and 'to'", result.getInt("total") == 0); } @@ -71,9 +73,10 @@ public void analyzer_english() throws IOException { public void analyzer_standard() throws IOException { // Standard analyzer does not treat 'in' and 'to' as special terms. // This results in 'to' being used as a phrase prefix given us 'Tokyo'. - String query = "source = %s " + - "| WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)" + - "| fields Title"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)" + + "| fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Local microbreweries and craft beer in Tokyo")); } @@ -83,21 +86,18 @@ public void zero_term_query_all() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // zero_terms_query of 'ALL' causes all rows to be returned. // ORDER BY ... LIMIT helps make the test understandable. 
- String query = "source = %s" + - "| WHERE match_phrase_prefix(Title, 'in to', analyzer=english, zero_terms_query='ALL') " + - "| sort -Title | head 1 | fields Title"; + String query = + "source = %s| WHERE match_phrase_prefix(Title, 'in to', analyzer=english," + + " zero_terms_query='ALL') | sort -Title | head 1 | fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("was working great, now all foam")); } - @Test public void slop_is_2() throws IOException { // When slop is 2, the terms are matched exactly in the order specified. // 'open' is used to match prefix of the next term. - String query = "source = %s" + - "| where match_phrase_prefix(Tags, 'gas ta', slop=2) " + - "| fields Tags"; + String query = "source = %s | where match_phrase_prefix(Tags, 'gas ta', slop=2) | fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("taste gas")); } @@ -105,12 +105,8 @@ public void slop_is_2() throws IOException { @Test public void slop_is_3() throws IOException { // When slop is 3, results will include phrases where the query terms are transposed. - String query = "source = %s" + - "| where match_phrase_prefix(Tags, 'gas ta', slop=3)" + - "| fields Tags"; + String query = "source = %s | where match_phrase_prefix(Tags, 'gas ta', slop=3) | fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, - rows("taste draught gas"), - rows("taste gas")); + verifyDataRows(result, rows("taste draught gas"), rows("taste gas")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java index 6dd2d3916f..2d6a52c12b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -31,138 +30,146 @@ public void init() throws IOException { @Test public void testAbs() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = abs(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = abs(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "integer")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testCeil() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = ceil(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = ceil(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testCeiling() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = ceiling(age) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = ceiling(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows( - 
result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testE() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = e() | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = e() | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.E), rows(Math.E), rows(Math.E), rows(Math.E), - rows(Math.E), rows(Math.E), rows(Math.E)); + result, + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E)); } @Test public void testExp() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = exp(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = exp(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.exp(32)), rows(Math.exp(36)), rows(Math.exp(28)), rows(Math.exp(33)), - rows(Math.exp(36)), rows(Math.exp(39)), rows(Math.exp(34))); + result, + rows(Math.exp(32)), + rows(Math.exp(36)), + rows(Math.exp(28)), + rows(Math.exp(33)), + rows(Math.exp(36)), + rows(Math.exp(39)), + rows(Math.exp(34))); } @Test public void testFloor() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = floor(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = floor(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testLn() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = ln(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = ln(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.log(32)), rows(Math.log(36)), rows(Math.log(28)), rows(Math.log(33)), - rows(Math.log(36)), rows(Math.log(39)), rows(Math.log(34))); + result, + rows(Math.log(32)), + rows(Math.log(36)), + rows(Math.log(28)), + rows(Math.log(33)), + rows(Math.log(36)), + rows(Math.log(39)), + rows(Math.log(34))); } @Test public void testLogOneArg() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = log(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = log(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); - verifyDataRows(result, - rows(Math.log(28)), rows(Math.log(32)), rows(Math.log(33)), rows(Math.log(34)), - rows(Math.log(36)), rows(Math.log(36)), rows(Math.log(39)) - ); + verifyDataRows( + result, + rows(Math.log(28)), + rows(Math.log(32)), + rows(Math.log(33)), + rows(Math.log(34)), + rows(Math.log(36)), + rows(Math.log(36)), + rows(Math.log(39))); } @Test public void testLogTwoArgs() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = log(age, balance) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = log(age, balance) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, 
closeTo(Math.log(39225) / Math.log(32)), closeTo(Math.log(5686) / Math.log(36)), - closeTo(Math.log(32838) / Math.log(28)), closeTo(Math.log(4180) / Math.log(33)), - closeTo(Math.log(16418) / Math.log(36)), closeTo(Math.log(40540) / Math.log(39)), + result, + closeTo(Math.log(39225) / Math.log(32)), + closeTo(Math.log(5686) / Math.log(36)), + closeTo(Math.log(32838) / Math.log(28)), + closeTo(Math.log(4180) / Math.log(33)), + closeTo(Math.log(16418) / Math.log(36)), + closeTo(Math.log(40540) / Math.log(39)), closeTo(Math.log(48086) / Math.log(34))); } @Test public void testLog10() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = log10(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = log10(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.log10(32)), rows(Math.log10(36)), rows(Math.log10(28)), - rows(Math.log10(33)), rows(Math.log10(36)), rows(Math.log10(39)), rows(Math.log10(34))); + result, + rows(Math.log10(32)), + rows(Math.log10(36)), + rows(Math.log10(28)), + rows(Math.log10(33)), + rows(Math.log10(36)), + rows(Math.log10(39)), + rows(Math.log10(34))); } @Test public void testLog2() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = log2(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = log2(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( result, - closeTo(Math.log(32) / Math.log(2)), closeTo(Math.log(36) / Math.log(2)), - closeTo(Math.log(28) / Math.log(2)), closeTo(Math.log(33) / Math.log(2)), - closeTo(Math.log(36) / Math.log(2)), closeTo(Math.log(39) / Math.log(2)), + closeTo(Math.log(32) / Math.log(2)), + closeTo(Math.log(36) / Math.log(2)), + closeTo(Math.log(28) / Math.log(2)), + closeTo(Math.log(33) / Math.log(2)), + closeTo(Math.log(36) / Math.log(2)), + closeTo(Math.log(39) / Math.log(2)), closeTo(Math.log(34) / Math.log(2))); } @@ -170,168 +177,178 @@ public void testLog2() throws IOException { public void testConv() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = conv(age, 10, 16) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = conv(age, 10, 16) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "string")); verifyDataRows( - result, rows("20"), rows("24"), rows("1c"), rows("21"), - rows("24"), rows("27"), rows("22")); + result, rows("20"), rows("24"), rows("1c"), rows("21"), rows("24"), rows("27"), rows("22")); } @Test public void testCrc32() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = crc32(firstname) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = crc32(firstname) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); verifyDataRows( - result, rows(324249283), rows(3369714977L), rows(1165568529), rows(2293694493L), - rows(3936131563L), rows(256963594), rows(824319315)); + result, + rows(324249283), + rows(3369714977L), + rows(1165568529), + rows(2293694493L), + rows(3936131563L), + rows(256963594), + rows(824319315)); } @Test public void testMod() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = mod(age, 10) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = mod(age, 10) | fields f", TEST_INDEX_BANK)); verifySchema(result, 
schema("f", null, "integer")); - verifyDataRows( - result, rows(2), rows(6), rows(8), rows(3), rows(6), rows(9), rows(4)); + verifyDataRows(result, rows(2), rows(6), rows(8), rows(3), rows(6), rows(9), rows(4)); } @Test public void testPow() throws IOException { JSONObject pow = - executeQuery( - String.format( - "source=%s | eval f = pow(age, 2) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = pow(age, 2) | fields f", TEST_INDEX_BANK)); verifySchema(pow, schema("f", null, "double")); verifyDataRows( - pow, rows(1024.0), rows(1296.0), rows(784.0), rows(1089.0), rows(1296.0), rows(1521.0), rows(1156.0)); + pow, + rows(1024.0), + rows(1296.0), + rows(784.0), + rows(1089.0), + rows(1296.0), + rows(1521.0), + rows(1156.0)); JSONObject power = executeQuery( - String.format( - "source=%s | eval f = power(age, 2) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = power(age, 2) | fields f", TEST_INDEX_BANK)); verifySchema(power, schema("f", null, "double")); verifyDataRows( - power, rows(1024.0), rows(1296.0), rows(784.0), rows(1089.0), rows(1296.0), rows(1521.0), rows(1156.0)); - + power, + rows(1024.0), + rows(1296.0), + rows(784.0), + rows(1089.0), + rows(1296.0), + rows(1521.0), + rows(1156.0)); } @Test public void testRound() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = round(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = round(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); result = executeQuery( - String.format( - "source=%s | eval f = round(age, -1) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = round(age, -1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(30), rows(40), rows(30), rows(30), rows(40), rows(40), rows(30)); + verifyDataRows(result, rows(30), rows(40), rows(30), rows(30), rows(40), rows(40), rows(30)); } @Test public void testSign() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = sign(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = sign(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "integer")); - verifyDataRows( - result, rows(1), rows(1), rows(1), rows(1), rows(1), rows(1), rows(1)); + verifyDataRows(result, rows(1), rows(1), rows(1), rows(1), rows(1), rows(1), rows(1)); } @Test public void testSqrt() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = sqrt(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = sqrt(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); - verifyDataRows(result, - rows(5.656854249492381), rows(6.0), rows(5.291502622129181), - rows(5.744562646538029), rows(6.0), rows(6.244997998398398), + verifyDataRows( + result, + rows(5.656854249492381), + rows(6.0), + rows(5.291502622129181), + rows(5.744562646538029), + rows(6.0), + rows(6.244997998398398), rows(5.830951894845301)); } @Test public void testCbrt() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = cbrt(num3) | fields f", TEST_INDEX_CALCS)); + 
executeQuery(String.format("source=%s | eval f = cbrt(num3) | fields f", TEST_INDEX_CALCS)); verifySchema(result, schema("f", null, "double")); - verifyDataRows(result, - closeTo(Math.cbrt(-11.52)), closeTo(Math.cbrt(-9.31)), closeTo(Math.cbrt(-12.17)), - closeTo(Math.cbrt(-7.25)), closeTo(Math.cbrt(12.93)), closeTo(Math.cbrt(-19.96)), - closeTo(Math.cbrt(10.93)), closeTo(Math.cbrt(3.64)), closeTo(Math.cbrt(-13.38)), - closeTo(Math.cbrt(-10.56)), closeTo(Math.cbrt(-4.79)), closeTo(Math.cbrt(-10.81)), - closeTo(Math.cbrt(-6.62)), closeTo(Math.cbrt(-18.43)), closeTo(Math.cbrt(6.84)), - closeTo(Math.cbrt(-10.98)), closeTo(Math.cbrt(-2.6))); + verifyDataRows( + result, + closeTo(Math.cbrt(-11.52)), + closeTo(Math.cbrt(-9.31)), + closeTo(Math.cbrt(-12.17)), + closeTo(Math.cbrt(-7.25)), + closeTo(Math.cbrt(12.93)), + closeTo(Math.cbrt(-19.96)), + closeTo(Math.cbrt(10.93)), + closeTo(Math.cbrt(3.64)), + closeTo(Math.cbrt(-13.38)), + closeTo(Math.cbrt(-10.56)), + closeTo(Math.cbrt(-4.79)), + closeTo(Math.cbrt(-10.81)), + closeTo(Math.cbrt(-6.62)), + closeTo(Math.cbrt(-18.43)), + closeTo(Math.cbrt(6.84)), + closeTo(Math.cbrt(-10.98)), + closeTo(Math.cbrt(-2.6))); } @Test public void testTruncate() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = truncate(age, 1) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = truncate(age, 1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); result = executeQuery( - String.format( - "source=%s | eval f = truncate(age, -1) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = truncate(age, -1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(30), rows(30), rows(20), rows(30), rows(30), rows(30), rows(30)); + verifyDataRows(result, rows(30), rows(30), rows(20), rows(30), rows(30), rows(30), rows(30)); } @Test public void testPi() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = pi() | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = pi() | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.PI), rows(Math.PI), rows(Math.PI), rows(Math.PI), - rows(Math.PI), rows(Math.PI), rows(Math.PI)); + result, + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI)); } @Test public void testRand() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = rand() | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = rand() | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "float")); result = - executeQuery( - String.format( - "source=%s | eval f = rand(5) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = rand(5) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "float")); } @Test public void testAcos() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = acos(0) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = acos(0) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, 
"double")); verifySome(result.getJSONArray("datarows"), rows(Math.acos(0))); } @@ -339,9 +356,7 @@ public void testAcos() throws IOException { @Test public void testAsin() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = asin(1) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = asin(1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.asin(1))); } @@ -349,16 +364,12 @@ public void testAsin() throws IOException { @Test public void testAtan() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = atan(2) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = atan(2) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.atan(2))); result = - executeQuery( - String.format( - "source=%s | eval f = atan(2, 3) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = atan(2, 3) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.atan2(2, 3))); } @@ -366,9 +377,7 @@ public void testAtan() throws IOException { @Test public void testAtan2() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = atan2(2, 3) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = atan2(2, 3) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.atan2(2, 3))); } @@ -376,9 +385,7 @@ public void testAtan2() throws IOException { @Test public void testCos() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = cos(1.57) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = cos(1.57) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.cos(1.57))); } @@ -386,9 +393,7 @@ public void testCos() throws IOException { @Test public void testCot() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = cot(2) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = cot(2) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), closeTo(1 / Math.tan(2))); } @@ -397,8 +402,7 @@ public void testCot() throws IOException { public void testDegrees() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = degrees(1.57) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = degrees(1.57) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.toDegrees(1.57))); } @@ -406,9 +410,7 @@ public void testDegrees() throws IOException { @Test public void testRadians() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = radians(90) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = radians(90) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.toRadians(90))); } @@ -416,9 +418,7 @@ public 
void testRadians() throws IOException { @Test public void testSin() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = sin(1.57) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = sin(1.57) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.sin(1.57))); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java index 41373afdc6..73882a4036 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.hamcrest.Matchers.equalTo; @@ -44,9 +43,7 @@ private void multiQueries(int n) throws IOException { } private Request makeStatRequest() { - return new Request( - "GET", "/_plugins/_ppl/stats" - ); + return new Request("GET", "/_plugins/_ppl/stats"); } private int pplRequestTotal() throws IOException { @@ -70,5 +67,4 @@ private String executeStatRequest(final Request request) throws IOException { return sb.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java index 6562c551da..8fc043d32d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; @@ -21,36 +20,41 @@ public void init() throws IOException { @Test public void test_multi_match() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste') | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste') | fields Id"; var result = executeQuery(query); assertEquals(16, result.getInt("total")); } @Test public void test_multi_match_all_params() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match(['Body', Tags], 'taste beer', operator='and', analyzer=english," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, cutoff_frequency=0.33," - + "fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient = true, max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', prefix_length = 7, tie_breaker = 0.3," - + "type = most_fields, slop = 2, zero_terms_query = 'ALL') | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE multi_match(['Body', Tags], 'taste beer', operator='and'," + + " analyzer=english,auto_generate_synonyms_phrase_query=true, boost = 0.77," + + " cutoff_frequency=0.33,fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient" + + " = true, max_expansions = 25,minimum_should_match = '2<-25% 9<-3', prefix_length =" + + " 7, tie_breaker = 0.3,type = most_fields, slop = 2, zero_terms_query = 'ALL') |" + + " fields Id"; var result = executeQuery(query); assertEquals(10, result.getInt("total")); } @Test public void test_wildcard_multi_match() throws IOException { - String query1 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match(['Tags'], 'taste') | fields Id"; + String query1 = + "SOURCE=" + 
TEST_INDEX_BEER + " | WHERE multi_match(['Tags'], 'taste') | fields Id"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match(['T*'], 'taste') | fields Id"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE multi_match(['T*'], 'taste') | fields Id"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "source=" + TEST_INDEX_BEER - + " | where simple_query_string(['*Date'], '2014-01-22')"; + String query3 = + "source=" + TEST_INDEX_BEER + " | where simple_query_string(['*Date'], '2014-01-22')"; JSONObject result3 = executeQuery(query3); assertEquals(10, result3.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java index a330614d21..2d94dc6a3b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java @@ -72,8 +72,7 @@ public NowLikeFunctionIT( @Name("constValue") Boolean constValue, @Name("referenceGetter") Supplier referenceGetter, @Name("parser") BiFunction parser, - @Name("serializationPatternStr") String serializationPatternStr - ) { + @Name("serializationPatternStr") String serializationPatternStr) { this.name = name; this.hasFsp = hasFsp; this.hasShortcut = hasShortcut; @@ -85,56 +84,104 @@ public NowLikeFunctionIT( @ParametersFactory(argumentFormatting = "%1$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("now", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("current_timestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtimestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtime", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("sysdate", true, false, false, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("curtime", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("current_time", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("curdate", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("current_date", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_date", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalDate()), - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_time", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalTime()), - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("utc_timestamp", false, false, true, - (Supplier) (org.opensearch.sql.sql.NowLikeFunctionIT::utcDateTimeNow), - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss") - )); + return Arrays.asList( + $$( + $( + "now", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "current_timestamp", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtimestamp", + 
false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtime", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "sysdate", + true, + false, + false, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "curtime", + false, + false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "current_time", + false, + false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "curdate", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "current_date", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_date", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalDate()), + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_time", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalTime()), + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "utc_timestamp", + false, + false, + true, + (Supplier) (org.opensearch.sql.sql.NowLikeFunctionIT::utcDateTimeNow), + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"))); } private long getDiff(Temporal sample, Temporal reference) { @@ -146,7 +193,8 @@ private long getDiff(Temporal sample, Temporal reference) { @Test public void testNowLikeFunctions() throws IOException { - var serializationPattern = new DateTimeFormatterBuilder() + var serializationPattern = + new DateTimeFormatterBuilder() .appendPattern(serializationPatternStr) .optionalStart() .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true) @@ -156,42 +204,57 @@ public void testNowLikeFunctions() throws IOException { double delta = 2d; // acceptable time diff, secs if (reference instanceof LocalDate) delta = 1d; // Max date delta could be 1 if test runs on the very edge of two days - // We ignore probability of a test run on edge of month or year to simplify the checks + // We ignore probability of a test run on edge of month or year to simplify the checks - var calls = new ArrayList() {{ - add(name + "()"); - }}; - if (hasShortcut) - calls.add(name); - if (hasFsp) - calls.add(name + "(0)"); + var calls = + new ArrayList() { + { + add(name + "()"); + } + }; + if (hasShortcut) calls.add(name); + if (hasFsp) calls.add(name + "(0)"); // Column order is: func(), func, func(0) // shortcut ^ fsp ^ // Query looks like: // source=people2 | eval `now()`=now() | fields `now()`; - JSONObject result = executeQuery("source=" + TEST_INDEX_PEOPLE2 - + " | eval " + calls.stream().map(c -> String.format("`%s`=%s", c, c)).collect(Collectors.joining(",")) - + " | fields " + calls.stream().map(c -> String.format("`%s`", c)).collect(Collectors.joining(","))); + JSONObject result = + executeQuery( + "source=" + + TEST_INDEX_PEOPLE2 + + " | eval " + + calls.stream() + .map(c -> String.format("`%s`=%s", c, c)) + .collect(Collectors.joining(",")) + + " | fields " + + calls.stream() + .map(c -> String.format("`%s`", c)) + .collect(Collectors.joining(","))); var rows = result.getJSONArray("datarows"); JSONArray firstRow = rows.getJSONArray(0); for (int i = 0; i < rows.length(); i++) { var row = rows.getJSONArray(i); - if (constValue) - assertTrue(firstRow.similar(row)); + if (constValue) assertTrue(firstRow.similar(row)); int column = 0; - assertEquals(0, - 
getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); if (hasShortcut) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); } if (hasFsp) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column), serializationPattern)), + delta); } } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java index 6178552728..d4d09c9af1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.SQLIntegTestCase.Index.DEEP_NESTED; @@ -26,55 +25,44 @@ public void init() throws IOException { @Test public void select_object_field() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | " - + "fields city.name, city.location.latitude", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("city.name", "string"), - schema("city.location.latitude", "double")); - verifyDataRows(result, - rows("Seattle", 10.5)); + JSONObject result = + executeQuery( + String.format( + "source=%s | fields city.name, city.location.latitude", TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("city.name", "string"), schema("city.location.latitude", "double")); + verifyDataRows(result, rows("Seattle", 10.5)); } @Test public void compare_object_field_in_where() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s " - + "| where city.name = 'Seattle' " - + "| fields city.name, city.location.latitude", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("city.name", "string"), - schema("city.location.latitude", "double")); - verifyDataRows(result, - rows("Seattle", 10.5)); + JSONObject result = + executeQuery( + String.format( + "source=%s " + + "| where city.name = 'Seattle' " + + "| fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("city.name", "string"), schema("city.location.latitude", "double")); + verifyDataRows(result, rows("Seattle", 10.5)); } @Test public void group_object_field_in_stats() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s " - + "| stats count() by city.name", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("count()", "integer"), - schema("city.name", "string")); - verifyDataRows(result, - rows(1, "Seattle")); + JSONObject result = + executeQuery( + String.format("source=%s | stats count() by city.name", TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("count()", "integer"), schema("city.name", "string")); + verifyDataRows(result, rows(1, "Seattle")); } @Test public void sort_by_object_field() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s " - + "| sort city.name" - + "| fields city.name, city.location.latitude", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("city.name", "string"), - 
schema("city.location.latitude", "double")); - verifyDataRows(result, - rows("Seattle", 10.5)); + JSONObject result = + executeQuery( + String.format( + "source=%s | sort city.name | fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("city.name", "string"), schema("city.location.latitude", "double")); + verifyDataRows(result, rows("Seattle", 10.5)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java index e6ca958991..42ed08b00c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -26,40 +25,28 @@ public void init() throws IOException { @Test public void testAddOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age = 31 + 1 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age = 31 + 1 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testSubtractOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age = 33 - 1 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age = 33 - 1 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testMultiplyOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age = 16 * 2 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age = 16 * 2 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testDivideOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age / 2 = 16 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age / 2 = 16 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32), rows(33)); } @@ -67,9 +54,7 @@ public void testDivideOperator() throws IOException { public void testModuleOperator() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | where age %s 32 = 0 | fields age", - TEST_INDEX_BANK, "%")); + String.format("source=%s | where age %s 32 = 0 | fields age", TEST_INDEX_BANK, "%")); verifyDataRows(result, rows(32)); } @@ -78,11 +63,9 @@ public void testArithmeticOperatorWithNullValue() throws IOException { JSONObject result = executeQuery( String.format( - "source=%s | eval f = age + 0 | fields f", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + "source=%s | eval f = age + 0 | fields f", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows( - result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(JSONObject.NULL), - rows(34)); + result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(JSONObject.NULL), rows(34)); } @Test @@ -92,8 +75,14 @@ public void testArithmeticOperatorWithMissingValue() throws IOException { String.format( "source=%s | eval f = balance * 1 | fields f", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows( - result, rows(39225), rows(32838), rows(4180), rows(48086), rows(JSONObject.NULL), - rows(JSONObject.NULL), rows(JSONObject.NULL)); + result, + rows(39225), + rows(32838), + rows(4180), + rows(48086), + rows(JSONObject.NULL), + 
rows(JSONObject.NULL), + rows(JSONObject.NULL)); } @Test @@ -101,8 +90,7 @@ public void testMultipleArithmeticOperators() throws IOException { JSONObject result = executeQuery( String.format( - "source=%s | where (age+2) * 3 / 2 - 1 = 50 | fields age", - TEST_INDEX_BANK)); + "source=%s | where (age+2) * 3 / 2 - 1 = 50 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @@ -127,14 +115,12 @@ public void testAndOperator() throws IOException { public void testOrOperator() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | where age=32 or age=34 | fields age", TEST_INDEX_BANK)); + String.format("source=%s | where age=32 or age=34 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32), rows(34)); result = executeQuery( - String.format( - "source=%s | where age=34 or age=32| fields age", TEST_INDEX_BANK)); + String.format("source=%s | where age=34 or age=32| fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32), rows(34)); } @@ -158,92 +144,64 @@ public void testXorOperator() throws IOException { @Test public void testNotOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s not age > 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s not age > 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(32)); } @Test public void testEqualOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age = 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age = 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); - result = - executeQuery( - String.format( - "source=%s 32 = age | fields age", - TEST_INDEX_BANK)); + result = executeQuery(String.format("source=%s 32 = age | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testNotEqualOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age != 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age != 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(33), rows(34), rows(36), rows(36), rows(39)); - result = - executeQuery( - String.format( - "source=%s 32 != age | fields age", - TEST_INDEX_BANK)); + result = executeQuery(String.format("source=%s 32 != age | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(33), rows(34), rows(36), rows(36), rows(39)); } @Test public void testLessOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age < 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age < 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28)); } @Test public void testLteOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age <= 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age <= 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(32)); } @Test public void testGreaterOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age > 36 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age > 36 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(39)); } @Test public void testGteOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age >= 36 | fields age", - 
TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age >= 36 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(36), rows(36), rows(39)); } @Test public void testLikeFunction() throws IOException { JSONObject result = - executeQuery(String.format("source=%s like(firstname, 'Hatti_') | fields firstname", - TEST_INDEX_BANK)); + executeQuery( + String.format( + "source=%s like(firstname, 'Hatti_') | fields firstname", TEST_INDEX_BANK)); verifyDataRows(result, rows("Hattie")); } @@ -251,8 +209,8 @@ public void testLikeFunction() throws IOException { public void testBinaryPredicateWithNullValue() throws IOException { JSONObject result = executeQuery( - String.format("source=%s | where age >= 36 | fields age", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + String.format( + "source=%s | where age >= 36 | fields age", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows(result, rows(36), rows(36)); } @@ -260,7 +218,8 @@ public void testBinaryPredicateWithNullValue() throws IOException { public void testBinaryPredicateWithMissingValue() throws IOException { JSONObject result = executeQuery( - String.format("source=%s | where balance > 40000 | fields balance", + String.format( + "source=%s | where balance > 40000 | fields balance", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows(result, rows(48086)); } @@ -269,12 +228,13 @@ private void queryExecutionShouldThrowExceptionDueToNullOrMissingValue( String query, String... errorMsgs) { try { executeQuery(query); - fail("Expected to throw ExpressionEvaluationException, but none was thrown for query: " - + query); + fail( + "Expected to throw ExpressionEvaluationException, but none was thrown for query: " + + query); } catch (ResponseException e) { String errorMsg = e.getMessage(); assertTrue(errorMsg.contains("ExpressionEvaluationException")); - for (String msg: errorMsgs) { + for (String msg : errorMsgs) { assertTrue(errorMsg.contains(msg)); } } catch (IOException e) { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java index bcf183e9c6..459788021d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestUtils.getResponseBody; @@ -20,9 +19,7 @@ import org.opensearch.client.Response; import org.opensearch.sql.legacy.SQLIntegTestCase; -/** - * OpenSearch Rest integration test base for PPL testing. - */ +/** OpenSearch Rest integration test base for PPL testing. 
*/ public abstract class PPLIntegTestCase extends SQLIntegTestCase { protected JSONObject executeQuery(String query) throws IOException { @@ -42,8 +39,10 @@ protected String explainQueryToString(String query) throws IOException { } protected String executeCsvQuery(String query, boolean sanitize) throws IOException { - Request request = buildRequest(query, - QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&sanitize=%b", sanitize)); + Request request = + buildRequest( + query, + QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&sanitize=%b", sanitize)); Response response = client().performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); return getResponseBody(response, true); @@ -65,8 +64,9 @@ protected Request buildRequest(String query, String endpoint) { protected static JSONObject updateClusterSettings(ClusterSetting setting) throws IOException { Request request = new Request("PUT", "/_cluster/settings"); - String persistentSetting = String.format(Locale.ROOT, - "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); + String persistentSetting = + String.format( + Locale.ROOT, "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); request.setJsonEntity(persistentSetting); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); restOptionsBuilder.addHeader("Content-Type", "application/json"); @@ -91,11 +91,7 @@ SQLIntegTestCase.ClusterSetting nullify() { @Override public String toString() { - return "ClusterSetting{" - + "type='" + type + '\'' - + ", path='" + name + '\'' - + ", value='" + value + '\'' - + '}'; + return String.format("ClusterSetting{type='%s', path='%s', value='%s'}", type, name, value); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java index df7b464118..0c638be1e7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.hamcrest.Matchers.equalTo; @@ -28,8 +27,7 @@ import org.opensearch.sql.util.TestUtils; public class PPLPluginIT extends PPLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); private static final String PERSISTENT = "persistent"; @@ -86,9 +84,11 @@ public void sqlEnableSettingsTest() throws IOException { assertThat(result.getInt("status"), equalTo(400)); JSONObject error = result.getJSONObject("error"); assertThat(error.getString("reason"), equalTo("Invalid Query")); - assertThat(error.getString("details"), equalTo( - "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is " - + "false")); + assertThat( + error.getString("details"), + equalTo( + "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is " + + "false")); assertThat(error.getString("type"), equalTo("IllegalAccessException")); // reset the setting diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java index 36fcb4bf3b..7f25f6f160 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 
*/ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -23,9 +22,10 @@ public void init() throws IOException { @Test public void testParseCommand() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | parse email '.+@(?.+)' | fields email, host", - TEST_INDEX_BANK)); + JSONObject result = + executeQuery( + String.format( + "source=%s | parse email '.+@(?.+)' | fields email, host", TEST_INDEX_BANK)); verifyOrder( result, rows("amberduke@pyrami.com", "pyrami.com"), @@ -39,8 +39,10 @@ public void testParseCommand() throws IOException { @Test public void testParseCommandReplaceOriginalField() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | parse email '.+@(?.+)' | fields email", TEST_INDEX_BANK)); + JSONObject result = + executeQuery( + String.format( + "source=%s | parse email '.+@(?.+)' | fields email", TEST_INDEX_BANK)); verifyOrder( result, rows("pyrami.com"), @@ -54,8 +56,12 @@ public void testParseCommandReplaceOriginalField() throws IOException { @Test public void testParseCommandWithOtherRunTimeFields() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | parse email '.+@(?.+)' | " - + "eval eval_result=1 | fields host, eval_result", TEST_INDEX_BANK)); + JSONObject result = + executeQuery( + String.format( + "source=%s | parse email '.+@(?.+)' | " + + "eval eval_result=1 | fields host, eval_result", + TEST_INDEX_BANK)); verifyOrder( result, rows("pyrami.com", 1), diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java index 59aade8bbd..a7f638b3dd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java @@ -13,87 +13,83 @@ import org.junit.Test; public class PositionFunctionIT extends PPLIntegTestCase { - @Override - public void init() throws IOException { - loadIndex(Index.CALCS); - } - - @Test - public void test_position_function() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position('ON', str1) | fields f"; - - var result = executeQuery(query); - - assertEquals(17, result.getInt("total")); - verifyDataRows(result, - rows(7), rows(7), - rows(2), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0)); - } - - @Test - public void test_position_function_with_fields_only() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position(str3 IN str2) | where str2 IN ('one', 'two', 'three')| fields f"; - - var result = executeQuery(query); - - assertEquals(3, result.getInt("total")); - verifyDataRows(result, rows(3), rows(0), rows(4)); - } - - @Test - public void test_position_function_with_string_literals() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position('world' IN 'hello world') | where str2='one' | fields f"; - - var result = executeQuery(query); - - assertEquals(1, result.getInt("total")); - verifyDataRows(result, rows(7)); - } - - @Test - public void test_position_function_with_nulls() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position('ee' IN str2) | where isnull(str2) | fields str2,f"; - - var result = executeQuery(query); - - assertEquals(4, result.getInt("total")); - verifyDataRows(result, - 
rows(null, null), - rows(null, null), - rows(null, null), - rows(null, null)); - } - - @Test - public void test_position_function_with_function_as_arg() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position(upper(str3) IN str1) | where like(str1, 'BINDING SUPPLIES') | fields f"; - - var result = executeQuery(query); - - assertEquals(1, result.getInt("total")); - verifyDataRows(result, rows(15)); - } - - @Test - public void test_position_function_with_function_in_where_clause() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | where position(str3 IN str2)=1 | fields str2"; - - var result = executeQuery(query); - - assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("eight"), rows("eleven")); - } + @Override + public void init() throws IOException { + loadIndex(Index.CALCS); + } + + @Test + public void test_position_function() throws IOException { + String query = "source=" + TEST_INDEX_CALCS + " | eval f=position('ON', str1) | fields f"; + + var result = executeQuery(query); + + assertEquals(17, result.getInt("total")); + verifyDataRows( + result, rows(7), rows(7), rows(2), rows(0), rows(0), rows(0), rows(0), rows(0), rows(0), + rows(0), rows(0), rows(0), rows(0), rows(0), rows(0), rows(0), rows(0)); + } + + @Test + public void test_position_function_with_fields_only() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position(str3 IN str2) | where str2 IN ('one', 'two', 'three')| fields f"; + + var result = executeQuery(query); + + assertEquals(3, result.getInt("total")); + verifyDataRows(result, rows(3), rows(0), rows(4)); + } + + @Test + public void test_position_function_with_string_literals() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position('world' IN 'hello world') | where str2='one' | fields f"; + + var result = executeQuery(query); + + assertEquals(1, result.getInt("total")); + verifyDataRows(result, rows(7)); + } + + @Test + public void test_position_function_with_nulls() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position('ee' IN str2) | where isnull(str2) | fields str2,f"; + + var result = executeQuery(query); + + assertEquals(4, result.getInt("total")); + verifyDataRows(result, rows(null, null), rows(null, null), rows(null, null), rows(null, null)); + } + + @Test + public void test_position_function_with_function_as_arg() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position(upper(str3) IN str1) | where like(str1, 'BINDING SUPPLIES') |" + + " fields f"; + + var result = executeQuery(query); + + assertEquals(1, result.getInt("total")); + verifyDataRows(result, rows(15)); + } + + @Test + public void test_position_function_with_function_in_where_clause() throws IOException { + String query = "source=" + TEST_INDEX_CALCS + " | where position(str3 IN str2)=1 | fields str2"; + + var result = executeQuery(query); + + assertEquals(2, result.getInt("total")); + verifyDataRows(result, rows("eight"), rows("eleven")); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java index 011f91eed5..8d72f02e29 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java @@ -40,10 +40,10 @@ public class 
PrometheusDataSourceCommandsIT extends PPLIntegTestCase { /** - * Integ tests are dependent on self generated metrics in prometheus instance. - * When running individual integ tests there - * is no time for generation of metrics in the test prometheus instance. - * This method gives prometheus time to generate metrics on itself. + * Integ tests are dependent on self generated metrics in prometheus instance. When running + * individual integ tests there is no time for generation of metrics in the test prometheus + * instance. This method gives prometheus time to generate metrics on itself. + * * @throws InterruptedException */ @BeforeClass @@ -54,8 +54,11 @@ protected static void metricGenerationWait() throws InterruptedException { @Override protected void init() throws InterruptedException, IOException { DataSourceMetadata createDSM = - new DataSourceMetadata("my_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "http://localhost:9090")); + new DataSourceMetadata( + "my_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "http://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); @@ -71,15 +74,15 @@ protected void deleteDataSourceMetadata() throws IOException { @Test @SneakyThrows public void testSourceMetricCommand() { - JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total"); - verifySchema(response, + JSONObject response = executeQuery("source=my_prometheus.prometheus_http_requests_total"); + verifySchema( + response, schema(VALUE, "double"), - schema(TIMESTAMP, "timestamp"), - schema("handler", "string"), - schema("code", "string"), - schema("instance", "string"), - schema("job", "string")); + schema(TIMESTAMP, "timestamp"), + schema("handler", "string"), + schema("code", "string"), + schema("instance", "string"), + schema("job", "string")); Assertions.assertTrue(response.getInt("size") > 0); Assertions.assertEquals(6, response.getJSONArray("datarows").getJSONArray(0).length()); JSONArray firstRow = response.getJSONArray("datarows").getJSONArray(0); @@ -93,19 +96,20 @@ public void testSourceMetricCommand() { @SneakyThrows public void testSourceMetricCommandWithTimestamp() { SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - String query = "source=my_prometheus.prometheus_http_requests_total | where @timestamp > '" - + format.format(new Date(System.currentTimeMillis() - 3600 * 1000)) - + "' | sort + @timestamp | head 5"; + String query = + "source=my_prometheus.prometheus_http_requests_total | where @timestamp > '" + + format.format(new Date(System.currentTimeMillis() - 3600 * 1000)) + + "' | sort + @timestamp | head 5"; - JSONObject response = - executeQuery(query); - verifySchema(response, + JSONObject response = executeQuery(query); + verifySchema( + response, schema(VALUE, "double"), - schema(TIMESTAMP, "timestamp"), - schema("handler", "string"), - schema("code", "string"), - schema("instance", "string"), - schema("job", "string")); + schema(TIMESTAMP, "timestamp"), + schema("handler", "string"), + schema("code", "string"), + schema("instance", "string"), + schema("job", "string")); // Currently, data is not injected into prometheus, // so asserting on result is not possible. Verifying only schema. 
} @@ -114,9 +118,12 @@ public void testSourceMetricCommandWithTimestamp() { @SneakyThrows public void testMetricAvgAggregationCommand() { JSONObject response = - executeQuery("source=`my_prometheus`.`prometheus_http_requests_total` | stats avg(@value) as `agg` by span(@timestamp, 15s), `handler`, `job`"); - verifySchema(response, - schema("agg", "double"), + executeQuery( + "source=`my_prometheus`.`prometheus_http_requests_total` | stats avg(@value) as `agg`" + + " by span(@timestamp, 15s), `handler`, `job`"); + verifySchema( + response, + schema("agg", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -133,9 +140,12 @@ public void testMetricAvgAggregationCommand() { @SneakyThrows public void testMetricAvgAggregationCommandWithAlias() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats avg(@value) as agg by span(@timestamp, 15s), `handler`, job"); - verifySchema(response, - schema("agg", "double"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats avg(@value) as agg by" + + " span(@timestamp, 15s), `handler`, job"); + verifySchema( + response, + schema("agg", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -148,15 +158,15 @@ public void testMetricAvgAggregationCommandWithAlias() { } } - @Test @SneakyThrows public void testMetricMaxAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats max(@value) by span(@timestamp, 15s)"); - verifySchema(response, - schema("max(@value)", "double"), - schema("span(@timestamp,15s)", "timestamp")); + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats max(@value) by" + + " span(@timestamp, 15s)"); + verifySchema( + response, schema("max(@value)", "double"), schema("span(@timestamp,15s)", "timestamp")); Assertions.assertTrue(response.getInt("size") > 0); Assertions.assertEquals(2, response.getJSONArray("datarows").getJSONArray(0).length()); JSONArray firstRow = response.getJSONArray("datarows").getJSONArray(0); @@ -166,14 +176,16 @@ public void testMetricMaxAggregationCommand() { } } - @Test @SneakyThrows public void testMetricMinAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats min(@value) by span(@timestamp, 15s), handler"); - verifySchema(response, - schema("min(@value)", "double"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats min(@value) by" + + " span(@timestamp, 15s), handler"); + verifySchema( + response, + schema("min(@value)", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string")); Assertions.assertTrue(response.getInt("size") > 0); @@ -189,9 +201,12 @@ public void testMetricMinAggregationCommand() { @SneakyThrows public void testMetricCountAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats count() by span(@timestamp, 15s), handler, job"); - verifySchema(response, - schema("count()", "integer"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats count() by" + + " span(@timestamp, 15s), handler, job"); + verifySchema( + response, + schema("count()", "integer"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -208,9 +223,12 @@ public void testMetricCountAggregationCommand() { 
@SneakyThrows public void testMetricSumAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats sum(@value) by span(@timestamp, 15s), handler, job"); - verifySchema(response, - schema("sum(@value)", "double"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats sum(@value) by" + + " span(@timestamp, 15s), handler, job"); + verifySchema( + response, + schema("sum(@value)", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -223,18 +241,21 @@ public void testMetricSumAggregationCommand() { } } - @Test @SneakyThrows public void testQueryRange() { long currentTimestamp = new Date().getTime(); JSONObject response = - executeQuery("source=my_prometheus.query_range('prometheus_http_requests_total'," - + ((currentTimestamp/1000)-3600) + "," + currentTimestamp/1000 + ", " + "'14'" + ")" ); - verifySchema(response, - schema(LABELS, "struct"), - schema(VALUE, "array"), - schema(TIMESTAMP, "array")); + executeQuery( + "source=my_prometheus.query_range('prometheus_http_requests_total'," + + ((currentTimestamp / 1000) - 3600) + + "," + + currentTimestamp / 1000 + + ", " + + "'14'" + + ")"); + verifySchema( + response, schema(LABELS, "struct"), schema(VALUE, "array"), schema(TIMESTAMP, "array")); Assertions.assertTrue(response.getInt("size") > 0); } @@ -243,25 +264,23 @@ public void explainQueryRange() throws Exception { String expected = loadFromFile("expectedOutput/ppl/explain_query_range.json"); assertJsonEquals( expected, - explainQueryToString("source = my_prometheus" - + ".query_range('prometheus_http_requests_total',1689281439,1689291439,14)") - ); + explainQueryToString( + "source = my_prometheus" + + ".query_range('prometheus_http_requests_total',1689281439,1689291439,14)")); } - @Test + @Test public void testExplainForQueryExemplars() throws Exception { String expected = loadFromFile("expectedOutput/ppl/explain_query_exemplars.json"); assertJsonEquals( expected, - explainQueryToString("source = my_prometheus." - + "query_exemplars('app_ads_ad_requests_total',1689228292,1689232299)") - ); + explainQueryToString( + "source = my_prometheus." + + "query_exemplars('app_ads_ad_requests_total',1689228292,1689232299)")); } String loadFromFile(String filename) throws Exception { URI uri = Resources.getResource(filename).toURI(); return new String(Files.readAllBytes(Paths.get(uri))); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java index 422cc92cd2..80a89ed9c3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -79,10 +78,7 @@ public void queryShouldBeCaseInsensitiveInKeywords() { queryShouldPassSyntaxAndSemanticCheck(query); } - /** - * Commands that fail syntax analysis should throw - * {@link SyntaxCheckException}. - */ + /** Commands that fail syntax analysis should throw {@link SyntaxCheckException}. 
*/ @Test public void queryNotStartingWithSearchCommandShouldFailSyntaxCheck() { String query = "fields firstname"; @@ -107,14 +103,12 @@ public void unsupportedAggregationFunctionShouldFailSyntaxCheck() { queryShouldThrowSyntaxException(query, "Failed to parse query due to offending symbol"); } - /** - * Commands that fail semantic analysis should throw {@link SemanticCheckException}. - */ + /** Commands that fail semantic analysis should throw {@link SemanticCheckException}. */ @Test public void nonexistentFieldShouldFailSemanticCheck() { String query = String.format("search source=%s | fields name", TEST_INDEX_ACCOUNT); - queryShouldThrowSemanticException(query, "can't resolve Symbol(namespace=FIELD_NAME, " - + "name=name) in type env"); + queryShouldThrowSemanticException( + query, "can't resolve Symbol(namespace=FIELD_NAME, name=name) in type env"); } private void queryShouldPassSyntaxAndSemanticCheck(String query) { @@ -134,7 +128,7 @@ private void queryShouldThrowSyntaxException(String query, String... messages) { } catch (ResponseException e) { String errorMsg = e.getMessage(); assertTrue(errorMsg.contains("SyntaxCheckException")); - for (String msg: messages) { + for (String msg : messages) { assertTrue(errorMsg.contains(msg)); } } catch (IOException e) { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java index 4ace407d72..42a637ead7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java @@ -27,22 +27,29 @@ public void all_fields_test() throws IOException { @Test public void mandatory_params_test() throws IOException { - String query = "source=" + TEST_INDEX_BEER + " | where query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; + String query = + "source=" + + TEST_INDEX_BEER + + " | where query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; JSONObject result = executeQuery(query); assertEquals(16, result.getInt("total")); } @Test public void all_params_test() throws IOException { - String query = "source=" + TEST_INDEX_BEER + " | where query_string(['Body', Tags, Title], 'taste beer'," - + "allow_leading_wildcard=true, enable_position_increments=true, escape=false," - + "fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states = 10000," - + "analyzer='english', analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77," - + "quote_analyzer='standard', phrase_slop=0, rewrite='constant_score', type='best_fields'," - + "tie_breaker=0.3, time_zone='Canada/Pacific', default_operator='or'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7)"; + String query = + "source=" + + TEST_INDEX_BEER + + " | where query_string(['Body', Tags, Title], 'taste" + + " beer',allow_leading_wildcard=true, enable_position_increments=true," + + " escape=false,fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states" + + " = 10000,analyzer='english', analyze_wildcard = false, quote_field_suffix =" + + " '.exact',auto_generate_synonyms_phrase_query=true, boost =" + + " 0.77,quote_analyzer='standard', phrase_slop=0, rewrite='constant_score'," + + " type='best_fields',tie_breaker=0.3, time_zone='Canada/Pacific'," + + " default_operator='or',fuzzy_transpositions = false, lenient = true," + + " fuzzy_max_expansions = 
25,minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length" + + " = 7)"; JSONObject result = executeQuery(query); assertEquals(49, result.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java index f65941b8f7..e3ed1661cd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -30,12 +29,8 @@ public void afterTest() throws IOException { @Test public void testRareWithoutGroup() throws IOException { - JSONObject result = - executeQuery(String.format("source=%s | rare gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("F"), - rows("M")); + JSONObject result = executeQuery(String.format("source=%s | rare gender", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("F"), rows("M")); } @Test @@ -65,6 +60,4 @@ public void testRareWithGroup() throws IOException { rows("M", "KY"), rows("M", "IN")); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java index 7c57bd5481..8e6614dfed 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java @@ -18,11 +18,11 @@ public void init() throws IOException { @Test public void test_wildcard_simple_query_string() throws IOException { - String query1 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); } @@ -33,11 +33,15 @@ public void test_wildcard_simple_query_string() throws IOException { */ @Test public void verify_flags_in_simple_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; + String query1 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; + String query2 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); @@ -53,11 +57,11 @@ public void verify_flags_in_simple_query_string() throws IOException { */ @Test public void verify_escape_in_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=true);"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=true);"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + 
" | WHERE query_string([Title], '?', escape=false);"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=false);"; var result2 = executeQuery(query2); assertEquals(0, result1.getInt("total")); assertEquals(8, result2.getInt("total")); @@ -70,11 +74,15 @@ public void verify_escape_in_query_string() throws IOException { */ @Test public void verify_default_operator_in_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE query_string([Title], 'beer taste', default_operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE query_string([Title], 'beer taste', default_operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -82,11 +90,15 @@ public void verify_default_operator_in_query_string() throws IOException { @Test public void verify_default_operator_in_simple_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -94,11 +106,11 @@ public void verify_default_operator_in_simple_query_string() throws IOException @Test public void verify_default_operator_in_multi_match() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='OR')"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='AND')"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -106,11 +118,11 @@ public void verify_default_operator_in_multi_match() throws IOException { @Test public void verify_operator_in_match() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='OR')"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='AND')"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); diff --git 
a/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java index ad1add4e12..ae06e75a06 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -43,7 +42,9 @@ public void testRenameMultiField() throws IOException { verifyColumn(result, columnName("FIRSTNAME"), columnName("AGE")); } - @Ignore("Wildcard is unsupported yet. Enable once https://github.com/opensearch-project/sql/issues/787 is resolved.") + @Ignore( + "Wildcard is unsupported yet. Enable once" + + " https://github.com/opensearch-project/sql/issues/787 is resolved.") @Test public void testRenameWildcardFields() throws IOException { JSONObject result = executeQuery("source=" + TEST_INDEX_ACCOUNT + " | rename %name as %NAME"); diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java index e608e94512..56b54ba748 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DOG; @@ -31,11 +30,11 @@ public void queryExceedResourceLimitShouldFail() throws IOException { new ClusterSetting("persistent", Settings.Key.QUERY_MEMORY_LIMIT.getKeyValue(), "1%")); String query = String.format("search source=%s age=20", TEST_INDEX_DOG); - ResponseException exception = - expectThrows(ResponseException.class, () -> executeQuery(query)); + ResponseException exception = expectThrows(ResponseException.class, () -> executeQuery(query)); assertEquals(503, exception.getResponse().getStatusLine().getStatusCode()); - assertThat(exception.getMessage(), Matchers.containsString("resource is not enough to run the" - + " query, quit.")); + assertThat( + exception.getMessage(), + Matchers.containsString("resource is not enough to run the" + " query, quit.")); // update plugins.ppl.query.memory_limit to default value 85% updateClusterSettings( diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java index 2e62b464bb..5d1b0203d7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java index d012cce9e8..224afde4c5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -26,17 +25,13 @@ public void init() throws IOException { public void testQuerySizeLimit() throws IOException { // Default setting, fetch 200 rows from source 
JSONObject result = - executeQuery( - String.format( - "search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); + executeQuery(String.format("search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); verifyDataRows(result, rows("Hattie"), rows("Elinor"), rows("Virginia")); // Fetch 1 rows from source setQuerySizeLimit(1); result = - executeQuery( - String.format( - "search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); + executeQuery(String.format("search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); verifyDataRows(result, rows("Hattie")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java index 4845d30033..c9c4854212 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java @@ -28,10 +28,10 @@ public class ShowDataSourcesCommandIT extends PPLIntegTestCase { /** - * Integ tests are dependent on self generated metrics in prometheus instance. - * When running individual integ tests there - * is no time for generation of metrics in the test prometheus instance. - * This method gives prometheus time to generate metrics on itself. + * Integ tests are dependent on self generated metrics in prometheus instance. When running + * individual integ tests there is no time for generation of metrics in the test prometheus + * instance. This method gives prometheus time to generate metrics on itself. + * * @throws InterruptedException */ @BeforeClass @@ -42,8 +42,11 @@ protected static void metricGenerationWait() throws InterruptedException { @Override protected void init() throws InterruptedException, IOException { DataSourceMetadata createDSM = - new DataSourceMetadata("my_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "http://localhost:9090")); + new DataSourceMetadata( + "my_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "http://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); @@ -59,26 +62,14 @@ protected void deleteDataSourceMetadata() throws IOException { @Test public void testShowDataSourcesCommands() throws IOException { JSONObject result = executeQuery("show datasources"); - verifyDataRows(result, - rows("my_prometheus", "PROMETHEUS"), - rows("@opensearch", "OPENSEARCH")); - verifyColumn( - result, - columnName("DATASOURCE_NAME"), - columnName("CONNECTOR_TYPE") - ); + verifyDataRows(result, rows("my_prometheus", "PROMETHEUS"), rows("@opensearch", "OPENSEARCH")); + verifyColumn(result, columnName("DATASOURCE_NAME"), columnName("CONNECTOR_TYPE")); } @Test public void testShowDataSourcesCommandsWithWhereClause() throws IOException { JSONObject result = executeQuery("show datasources | where CONNECTOR_TYPE='PROMETHEUS'"); - verifyDataRows(result, - rows("my_prometheus", "PROMETHEUS")); - verifyColumn( - result, - columnName("DATASOURCE_NAME"), - columnName("CONNECTOR_TYPE") - ); + verifyDataRows(result, rows("my_prometheus", "PROMETHEUS")); + verifyColumn(result, columnName("DATASOURCE_NAME"), columnName("CONNECTOR_TYPE")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java 
b/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java index 46111b902e..714557412f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java @@ -19,36 +19,42 @@ public void init() throws IOException { @Test public void test_simple_query_string() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste') | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste') |" + + " fields Id"; var result = executeQuery(query); assertEquals(16, result.getInt("total")); } @Test public void test_simple_query_string_all_params() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['Body', Tags, Title], 'taste beer', default_operator='or'," - + "analyzer=english, analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, flags='PREFIX'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7) | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string(['Body', Tags, Title], 'taste beer'," + + " default_operator='or',analyzer=english, analyze_wildcard = false," + + " quote_field_suffix = '.exact',auto_generate_synonyms_phrase_query=true, boost =" + + " 0.77, flags='PREFIX',fuzzy_transpositions = false, lenient = true," + + " fuzzy_max_expansions = 25,minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length" + + " = 7) | fields Id"; var result = executeQuery(query); assertEquals(49, result.getInt("total")); } @Test public void test_wildcard_simple_query_string() throws IOException { - String query1 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "source=" + TEST_INDEX_BEER - + " | where simple_query_string(['*Date'], '2014-01-22')"; + String query3 = + "source=" + TEST_INDEX_BEER + " | where simple_query_string(['*Date'], '2014-01-22')"; JSONObject result3 = executeQuery(query3); assertEquals(10, result3.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java index 01befa0541..c90a506252 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java index 8ef8787597..f81e1b6615 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.datasource.model.DataSourceMetadata.defaultOpenSearchDataSourceMetadata; @@ -78,17 +77,21 @@ public class StandaloneIT extends PPLIntegTestCase { public void init() { RestHighLevelClient restClient = new InternalRestHighLevelClient(client()); OpenSearchClient client = new OpenSearchRestClient(restClient); - DataSourceService dataSourceService = new DataSourceServiceImpl( - new ImmutableSet.Builder() - .add(new OpenSearchDataSourceFactory(client, defaultSettings())) - .build(), getDataSourceMetadataStorage(), getDataSourceUserRoleHelper()); + DataSourceService dataSourceService = + new DataSourceServiceImpl( + new ImmutableSet.Builder() + .add(new OpenSearchDataSourceFactory(client, defaultSettings())) + .build(), + getDataSourceMetadataStorage(), + getDataSourceUserRoleHelper()); dataSourceService.createDataSource(defaultOpenSearchDataSourceMetadata()); ModulesBuilder modules = new ModulesBuilder(); - modules.add(new StandaloneModule(new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); + modules.add( + new StandaloneModule( + new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); Injector injector = modules.createInjector(); - pplService = - SecurityAccess.doPrivileged(() -> injector.getInstance(PPLService.class)); + pplService = SecurityAccess.doPrivileged(() -> injector.getInstance(PPLService.class)); } @Test @@ -146,9 +149,8 @@ public void onFailure(Exception e) { private Settings defaultSettings() { return new Settings() { - private final Map defaultSettings = new ImmutableMap.Builder() - .put(Key.QUERY_SIZE_LIMIT, 200) - .build(); + private final Map defaultSettings = + new ImmutableMap.Builder().put(Key.QUERY_SIZE_LIMIT, 200).build(); @Override public T getSettingValue(Key key) { @@ -162,9 +164,7 @@ public List getSettings() { }; } - /** - * Internal RestHighLevelClient only for testing purpose. - */ + /** Internal RestHighLevelClient only for testing purpose. 
*/ static class InternalRestHighLevelClient extends RestHighLevelClient { public InternalRestHighLevelClient(RestClient restClient) { super(restClient, RestClient::close, Collections.emptyList()); @@ -197,8 +197,8 @@ public StorageEngine storageEngine(OpenSearchClient client) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, - PlanSerializer planSerializer) { + public ExecutionEngine executionEngine( + OpenSearchClient client, ExecutionProtector protector, PlanSerializer planSerializer) { return new OpenSearchExecutionEngine(client, protector, planSerializer); } @@ -257,28 +257,20 @@ public Optional getDataSourceMetadata(String datasourceName) } @Override - public void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { - - } + public void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata) {} @Override - public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { - - } + public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) {} @Override - public void deleteDataSourceMetadata(String datasourceName) { - - } + public void deleteDataSourceMetadata(String datasourceName) {} }; } public static DataSourceUserAuthorizationHelper getDataSourceUserRoleHelper() { return new DataSourceUserAuthorizationHelper() { @Override - public void authorizeDataSource(DataSourceMetadata dataSourceMetadata) { - - } + public void authorizeDataSource(DataSourceMetadata dataSourceMetadata) {} }; } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java index 5389f245a4..92b9e309b8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -66,26 +65,23 @@ public void testStatsDistinctCount() throws IOException { verifySchema(response, schema("distinct_count(gender)", null, "integer")); verifyDataRows(response, rows(2)); - response = - executeQuery(String.format("source=%s | stats dc(age)", TEST_INDEX_ACCOUNT)); + response = executeQuery(String.format("source=%s | stats dc(age)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("dc(age)", null, "integer")); verifyDataRows(response, rows(21)); } @Test public void testStatsMin() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats min(age)", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery(String.format("source=%s | stats min(age)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("min(age)", null, "long")); verifyDataRows(response, rows(20)); } @Test public void testStatsMax() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats max(age)", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery(String.format("source=%s | stats max(age)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("max(age)", null, "long")); verifyDataRows(response, rows(40)); } @@ -93,8 +89,8 @@ public void testStatsMax() throws IOException { @Test public void testStatsNested() throws IOException { JSONObject response = - executeQuery(String.format("source=%s | stats avg(abs(age) * 2) as AGE", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format("source=%s | stats avg(abs(age) * 2) as AGE", 
TEST_INDEX_ACCOUNT)); verifySchema(response, schema("AGE", null, "double")); verifyDataRows(response, rows(60.342)); } @@ -102,8 +98,7 @@ public void testStatsNested() throws IOException { @Test public void testStatsNestedDoubleValue() throws IOException { JSONObject response = - executeQuery(String.format("source=%s | stats avg(abs(age) * 2.0)", - TEST_INDEX_ACCOUNT)); + executeQuery(String.format("source=%s | stats avg(abs(age) * 2.0)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("avg(abs(age) * 2.0)", null, "double")); verifyDataRows(response, rows(60.342)); } @@ -111,88 +106,87 @@ public void testStatsNestedDoubleValue() throws IOException { @Test public void testStatsWhere() throws IOException { JSONObject response = - executeQuery(String.format( - "source=%s | stats sum(balance) as a by state | where a > 780000", - TEST_INDEX_ACCOUNT)); - verifySchema(response, schema("a", null, "long"), - schema("state", null, "string")); + executeQuery( + String.format( + "source=%s | stats sum(balance) as a by state | where a > 780000", + TEST_INDEX_ACCOUNT)); + verifySchema(response, schema("a", null, "long"), schema("state", null, "string")); verifyDataRows(response, rows(782199, "TX")); } @Test public void testGroupByNullValue() throws IOException { JSONObject response = - executeQuery(String.format( - "source=%s | stats avg(balance) as a by age", - TEST_INDEX_BANK_WITH_NULL_VALUES)); - verifySchema(response, schema("a", null, "double"), - schema("age", null, "integer")); - verifyDataRows(response, + executeQuery( + String.format( + "source=%s | stats avg(balance) as a by age", TEST_INDEX_BANK_WITH_NULL_VALUES)); + verifySchema(response, schema("a", null, "double"), schema("age", null, "integer")); + verifyDataRows( + response, rows(null, null), rows(32838D, 28), rows(39225D, 32), rows(4180D, 33), rows(48086D, 34), - rows(null, 36) - ); + rows(null, 36)); } - //Todo. The column of agg function is in random order. This is because we create the project + // Todo. The column of agg function is in random order. This is because we create the project // all operator from the symbol table which can't maintain the original column order. 
@Test public void testMultipleAggregationFunction() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats min(age), max(age)", - TEST_INDEX_ACCOUNT)); - verifySchema(response, schema("min(age)", null, "long"), - schema("max(age)", null, "long")); + JSONObject response = + executeQuery(String.format("source=%s | stats min(age), max(age)", TEST_INDEX_ACCOUNT)); + verifySchema(response, schema("min(age)", null, "long"), schema("max(age)", null, "long")); verifyDataRows(response, rows(20, 40)); } @Test public void testStatsWithNull() throws IOException { JSONObject response = - executeQuery(String.format( - "source=%s | stats avg(age)", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + executeQuery(String.format("source=%s | stats avg(age)", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifySchema(response, schema("avg(age)", null, "double")); verifyDataRows(response, rows(33.166666666666664)); } @Test public void testStatsWithMissing() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats avg(balance)", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + JSONObject response = + executeQuery( + String.format("source=%s | stats avg(balance)", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifySchema(response, schema("avg(balance)", null, "double")); verifyDataRows(response, rows(31082.25)); } @Test public void testStatsBySpan() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats count() by span(age,10)", - TEST_INDEX_BANK)); - verifySchema(response, schema("count()", null, "integer"), schema("span(age,10)", null, "integer")); + JSONObject response = + executeQuery(String.format("source=%s | stats count() by span(age,10)", TEST_INDEX_BANK)); + verifySchema( + response, schema("count()", null, "integer"), schema("span(age,10)", null, "integer")); verifyDataRows(response, rows(1, 20), rows(6, 30)); } @Test public void testStatsTimeSpan() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats count() by span(birthdate,1y)", - TEST_INDEX_BANK)); - verifySchema(response, schema("count()", null, "integer"), schema( - "span(birthdate,1y)", null, "timestamp")); + JSONObject response = + executeQuery( + String.format("source=%s | stats count() by span(birthdate,1y)", TEST_INDEX_BANK)); + verifySchema( + response, + schema("count()", null, "integer"), + schema("span(birthdate,1y)", null, "timestamp")); verifyDataRows(response, rows(2, "2017-01-01 00:00:00"), rows(5, "2018-01-01 00:00:00")); } @Test public void testStatsAliasedSpan() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats count() by span(age,10) as age_bucket", - TEST_INDEX_BANK)); - verifySchema(response, schema("count()", null, "integer"), schema("age_bucket", null, "integer")); + JSONObject response = + executeQuery( + String.format( + "source=%s | stats count() by span(age,10) as age_bucket", TEST_INDEX_BANK)); + verifySchema( + response, schema("count()", null, "integer"), schema("age_bucket", null, "integer")); verifyDataRows(response, rows(1, 20), rows(6, 30)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java index de13aa5488..d2cd140e99 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java @@ -26,49 +26,63 @@ public void init() throws IOException { @Test public 
void typeof_sql_types() throws IOException { - JSONObject response = executeQuery(String.format("source=%s | eval " - + "`str` = typeof('pewpew'), `double` = typeof(1.0)," - + "`int` = typeof(12345), `long` = typeof(1234567891011), `interval` = typeof(INTERVAL 2 DAY)" - + " | fields `str`, `double`, `int`, `long`, `interval`", - TEST_INDEX_DATATYPE_NUMERIC)); + JSONObject response = + executeQuery( + String.format( + "source=%s | eval `str` = typeof('pewpew')," + + " `double` = typeof(1.0)," + + "`int` = typeof(12345)," + + " `long` = typeof(1234567891011)," + + " `interval` = typeof(INTERVAL 2 DAY)" + + " | fields `str`, `double`, `int`, `long`, `interval`", + TEST_INDEX_DATATYPE_NUMERIC)); // TODO: test null in PPL - verifyDataRows(response, - rows("KEYWORD", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); + verifyDataRows(response, rows("KEYWORD", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); - response = executeQuery(String.format("source=%s | eval " - + "`timestamp` = typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," - + "`time` = typeof(CAST('09:07:00' AS TIME))," - + "`date` = typeof(CAST('1961-04-12' AS DATE))," - + "`datetime` = typeof(DATETIME('1961-04-12 09:07:00'))" - + " | fields `timestamp`, `time`, `date`, `datetime`", - TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, - rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); + response = + executeQuery( + String.format( + "source=%s | eval " + + "`timestamp` = typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," + + "`time` = typeof(CAST('09:07:00' AS TIME))," + + "`date` = typeof(CAST('1961-04-12' AS DATE))," + + "`datetime` = typeof(DATETIME('1961-04-12 09:07:00'))" + + " | fields `timestamp`, `time`, `date`, `datetime`", + TEST_INDEX_DATATYPE_NUMERIC)); + verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); } @Test public void typeof_opensearch_types() throws IOException { - JSONObject response = executeQuery(String.format("source=%s | eval " - + "`double` = typeof(double_number), `long` = typeof(long_number)," - + "`integer` = typeof(integer_number), `byte` = typeof(byte_number)," - + "`short` = typeof(short_number), `float` = typeof(float_number)," - + "`half_float` = typeof(half_float_number), `scaled_float` = typeof(scaled_float_number)" - + " | fields `double`, `long`, `integer`, `byte`, `short`, `float`, `half_float`, `scaled_float`", - TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, - rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); + JSONObject response = + executeQuery( + String.format( + "source=%s | eval `double` = typeof(double_number), `long` =" + + " typeof(long_number),`integer` = typeof(integer_number), `byte` =" + + " typeof(byte_number),`short` = typeof(short_number), `float` =" + + " typeof(float_number),`half_float` = typeof(half_float_number)," + + " `scaled_float` = typeof(scaled_float_number) | fields `double`, `long`," + + " `integer`, `byte`, `short`, `float`, `half_float`, `scaled_float`", + TEST_INDEX_DATATYPE_NUMERIC)); + verifyDataRows( + response, rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); - response = executeQuery(String.format("source=%s | eval " - + "`text` = typeof(text_value), `date` = typeof(date_value)," - + "`boolean` = typeof(boolean_value), `object` = typeof(object_value)," - + "`keyword` = typeof(keyword_value), `ip` = typeof(ip_value)," - + "`binary` = typeof(binary_value), `geo_point` = typeof(geo_point_value)" - // TODO activate this test once `ARRAY` type supported, see 
ExpressionAnalyzer::isTypeNotSupported - //+ ", `nested` = typeof(nested_value)" - + " | fields `text`, `date`, `boolean`, `object`, `keyword`, `ip`, `binary`, `geo_point`", - TEST_INDEX_DATATYPE_NONNUMERIC)); - verifyDataRows(response, - rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", - "IP", "BINARY", "GEO_POINT")); + response = + executeQuery( + String.format( + "source=%s | eval " + + "`text` = typeof(text_value), `date` = typeof(date_value)," + + "`boolean` = typeof(boolean_value), `object` = typeof(object_value)," + + "`keyword` = typeof(keyword_value), `ip` = typeof(ip_value)," + + "`binary` = typeof(binary_value), `geo_point` = typeof(geo_point_value)" + // TODO activate this test once `ARRAY` type supported, see + // ExpressionAnalyzer::isTypeNotSupported + // + ", `nested` = typeof(nested_value)" + + " | fields `text`, `date`, `boolean`, `object`, `keyword`, `ip`, `binary`," + + " `geo_point`", + TEST_INDEX_DATATYPE_NONNUMERIC)); + verifyDataRows( + response, + rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", "IP", "BINARY", "GEO_POINT")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java index 024f190bee..dc9f1d98d2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_STRINGS; @@ -23,27 +22,45 @@ public void init() throws IOException { loadIndex(Index.BANK_WITH_STRING_VALUES); } - void verifyQuery(String command, String initialArgs, String additionalArgs, - String outputRow1, String outputRow2, String outputRow3) throws IOException { - String query = String.format( - "source=%s | eval f=%s(%sname%s) | fields f", TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); + void verifyQuery( + String command, + String initialArgs, + String additionalArgs, + String outputRow1, + String outputRow2, + String outputRow3) + throws IOException { + String query = + String.format( + "source=%s | eval f=%s(%sname%s) | fields f", + TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); JSONObject result = executeQuery(query); verifySchema(result, schema("f", null, "string")); verifyDataRows(result, rows(outputRow1), rows(outputRow2), rows(outputRow3)); } - void verifyQuery(String command, String initialArgs, String additionalArgs, - Integer outputRow1, Integer outputRow2, Integer outputRow3) throws IOException { - String query = String.format( - "source=%s | eval f=%s(%sname%s) | fields f", TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); + void verifyQuery( + String command, + String initialArgs, + String additionalArgs, + Integer outputRow1, + Integer outputRow2, + Integer outputRow3) + throws IOException { + String query = + String.format( + "source=%s | eval f=%s(%sname%s) | fields f", + TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); JSONObject result = executeQuery(query); verifySchema(result, schema("f", null, "integer")); verifyDataRows(result, rows(outputRow1), rows(outputRow2), rows(outputRow3)); } - void verifyRegexQuery(String pattern, Integer outputRow1, Integer outputRow2, Integer outputRow3) throws IOException { - String query = String.format( - "source=%s | eval f=name regexp '%s' | fields f", TEST_INDEX_STRINGS, pattern); + void verifyRegexQuery(String pattern, Integer 
outputRow1, Integer outputRow2, Integer outputRow3) + throws IOException { + String query = + String.format( + "source=%s | eval f=name regexp '%s' | fields f", TEST_INDEX_STRINGS, pattern); JSONObject result = executeQuery(query); verifySchema(result, schema("f", null, "integer")); verifyDataRows(result, rows(outputRow1), rows(outputRow2), rows(outputRow3)); @@ -55,7 +72,7 @@ public void testRegexp() throws IOException { verifyRegexQuery(".*", 1, 1, 1); } - @Test + @Test public void testSubstr() throws IOException { verifyQuery("substr", "", ", 2", "ello", "orld", "elloworld"); verifyQuery("substr", "", ", 2, 2", "el", "or", "el"); @@ -99,14 +116,19 @@ public void testLtrim() throws IOException { @Test public void testConcat() throws IOException { - verifyQuery("concat", "", ", 'there', 'all', '!'", - "hellothereall!", "worldthereall!", "helloworldthereall!"); + verifyQuery( + "concat", + "", + ", 'there', 'all', '!'", + "hellothereall!", + "worldthereall!", + "helloworldthereall!"); } @Test public void testConcat_ws() throws IOException { - verifyQuery("concat_ws", "',', ", ", 'there'", - "hello,there", "world,there", "helloworld,there"); + verifyQuery( + "concat_ws", "',', ", ", 'there'", "hello,there", "world,there", "helloworld,there"); } @Test @@ -137,7 +159,8 @@ public void testLocate() throws IOException { @Test public void testReplace() throws IOException { - verifyQuery("replace", "", ", 'world', ' opensearch'", "hello", " opensearch", "hello opensearch"); + verifyQuery( + "replace", "", ", 'world', ' opensearch'", "hello", " opensearch", "hello opensearch"); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java index 054ff303a1..f9587e4b63 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -14,7 +13,7 @@ import org.json.JSONObject; import org.junit.jupiter.api.Test; -public class TopCommandIT extends PPLIntegTestCase{ +public class TopCommandIT extends PPLIntegTestCase { @Override public void init() throws IOException { @@ -24,30 +23,20 @@ public void init() throws IOException { @Test public void testTopWithoutGroup() throws IOException { - JSONObject result = - executeQuery(String.format("source=%s | top gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("M"), - rows("F")); + JSONObject result = executeQuery(String.format("source=%s | top gender", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("M"), rows("F")); } @Test - public void testTopNWithoutGroup() throws IOException{ - JSONObject result = - executeQuery(String.format("source=%s | top 1 gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("M")); + public void testTopNWithoutGroup() throws IOException { + JSONObject result = executeQuery(String.format("source=%s | top 1 gender", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("M")); } @Test public void testTopNWithGroup() throws IOException { JSONObject result = executeQuery(String.format("source=%s | top 1 state by gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("F", "TX"), - rows("M", "MD")); + verifyDataRows(result, rows("F", "TX"), rows("M", "MD")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java 
b/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java index d530b4140d..263ed502ed 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java @@ -24,8 +24,9 @@ public void init() throws IOException { @Test void format() throws IOException { - String result = executeVizQuery( - String.format(Locale.ROOT, "source=%s | fields firstname, age", TEST_INDEX_BANK), true); + String result = + executeVizQuery( + String.format(Locale.ROOT, "source=%s | fields firstname, age", TEST_INDEX_BANK), true); assertEquals( "{\n" + " \"data\": {\n" @@ -67,8 +68,10 @@ void format() throws IOException { } private String executeVizQuery(String query, boolean pretty) throws IOException { - Request request = buildRequest(query, - QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&pretty=%b", pretty)); + Request request = + buildRequest( + query, + QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&pretty=%b", pretty)); Response response = client().performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); return getResponseBody(response, true); diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java index ba870732fd..d56f9ffe32 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java index 1ca21041a3..b037167ed7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.equalTo; @@ -66,8 +65,10 @@ public void describeSingleIndexAlias() throws IOException { public void describeSingleIndexWildcard() throws IOException { JSONObject response1 = executeQuery("DESCRIBE TABLES LIKE \\\"%account\\\""); JSONObject response2 = executeQuery("DESCRIBE TABLES LIKE '%account'"); - JSONObject response3 = executeQuery("DESCRIBE TABLES LIKE '%account' COLUMNS LIKE \\\"%name\\\""); - JSONObject response4 = executeQuery("DESCRIBE TABLES LIKE \\\"%account\\\" COLUMNS LIKE '%name'"); + JSONObject response3 = + executeQuery("DESCRIBE TABLES LIKE '%account' COLUMNS LIKE \\\"%name\\\""); + JSONObject response4 = + executeQuery("DESCRIBE TABLES LIKE \\\"%account\\\" COLUMNS LIKE '%name'"); // 11 rows in the output, each corresponds to a column in the table assertEquals(11, response1.getJSONArray("datarows").length()); assertTrue(response1.similar(response2)); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java index 1075b14431..339cd56370 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java @@ -37,17 +37,19 @@ protected void init() throws Exception { @Test public void testFilteredAggregatePushDown() throws IOException { - JSONObject response = executeQuery( - "SELECT 
COUNT(*) FILTER(WHERE age > 35) FROM " + TEST_INDEX_BANK); + JSONObject response = + executeQuery("SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TEST_INDEX_BANK); verifySchema(response, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); verifyDataRows(response, rows(3)); } @Test public void testFilteredAggregateNotPushDown() throws IOException { - JSONObject response = executeQuery( - "SELECT COUNT(*) FILTER(WHERE age > 35) FROM (SELECT * FROM " + TEST_INDEX_BANK - + ") AS a"); + JSONObject response = + executeQuery( + "SELECT COUNT(*) FILTER(WHERE age > 35) FROM (SELECT * FROM " + + TEST_INDEX_BANK + + ") AS a"); verifySchema(response, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); verifyDataRows(response, rows(3)); } @@ -55,45 +57,65 @@ public void testFilteredAggregateNotPushDown() throws IOException { @Test public void testPushDownAggregationOnNullValues() throws IOException { // OpenSearch aggregation query (MetricAggregation) - var response = executeQuery(String.format( - "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + - "FROM %s WHERE `key` = 'null'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, - schema("min(`int`)", null, "integer"), schema("max(`int`)", null, "integer"), - schema("avg(`int`)", null, "double"), schema("min(`dbl`)", null, "double"), - schema("max(`dbl`)", null, "double"), schema("avg(`dbl`)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + + "FROM %s WHERE `key` = 'null'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, + schema("min(`int`)", null, "integer"), + schema("max(`int`)", null, "integer"), + schema("avg(`int`)", null, "double"), + schema("min(`dbl`)", null, "double"), + schema("max(`dbl`)", null, "double"), + schema("avg(`dbl`)", null, "double")); verifyDataRows(response, rows(null, null, null, null, null, null)); } @Test public void testPushDownAggregationOnMissingValues() throws IOException { // OpenSearch aggregation query (MetricAggregation) - var response = executeQuery(String.format( - "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + - "FROM %s WHERE `key` = 'null'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, - schema("min(`int`)", null, "integer"), schema("max(`int`)", null, "integer"), - schema("avg(`int`)", null, "double"), schema("min(`dbl`)", null, "double"), - schema("max(`dbl`)", null, "double"), schema("avg(`dbl`)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + + "FROM %s WHERE `key` = 'null'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, + schema("min(`int`)", null, "integer"), + schema("max(`int`)", null, "integer"), + schema("avg(`int`)", null, "double"), + schema("min(`dbl`)", null, "double"), + schema("max(`dbl`)", null, "double"), + schema("avg(`dbl`)", null, "double")); verifyDataRows(response, rows(null, null, null, null, null, null)); } @Test public void testInMemoryAggregationOnNullValues() throws IOException { // In-memory aggregation performed by the plugin - var response = executeQuery(String.format("SELECT" - + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," - + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," - + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" - + " FROM %s WHERE `key` = 'null'", 
TEST_INDEX_NULL_MISSING)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT" + + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," + + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," + + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" + + " FROM %s WHERE `key` = 'null'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, schema("min(`int`) over (PARTITION BY `key`)", null, "integer"), schema("max(`int`) over (PARTITION BY `key`)", null, "integer"), schema("avg(`int`) over (PARTITION BY `key`)", null, "double"), schema("min(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("max(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("avg(`dbl`) over (PARTITION BY `key`)", null, "double")); - verifyDataRows(response, // 4 rows with null values + verifyDataRows( + response, // 4 rows with null values rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), @@ -103,19 +125,25 @@ public void testInMemoryAggregationOnNullValues() throws IOException { @Test public void testInMemoryAggregationOnMissingValues() throws IOException { // In-memory aggregation performed by the plugin - var response = executeQuery(String.format("SELECT" - + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," - + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," - + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" - + " FROM %s WHERE `key` = 'missing'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT" + + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," + + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," + + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" + + " FROM %s WHERE `key` = 'missing'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, schema("min(`int`) over (PARTITION BY `key`)", null, "integer"), schema("max(`int`) over (PARTITION BY `key`)", null, "integer"), schema("avg(`int`) over (PARTITION BY `key`)", null, "double"), schema("min(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("max(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("avg(`dbl`) over (PARTITION BY `key`)", null, "double")); - verifyDataRows(response, // 4 rows with null values + verifyDataRows( + response, // 4 rows with null values rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), @@ -124,12 +152,17 @@ public void testInMemoryAggregationOnMissingValues() throws IOException { @Test public void testInMemoryAggregationOnNullValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + " max(int0) over (PARTITION BY `datetime1`)," - + " min(int0) over (PARTITION BY `datetime1`)," - + " avg(int0) over (PARTITION BY `datetime1`)" - + "from %s where int0 IS NULL;", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + + " max(int0) over (PARTITION BY `datetime1`)," + + " min(int0) over (PARTITION BY `datetime1`)," + + " avg(int0) over (PARTITION BY `datetime1`)" + + "from %s where int0 IS NULL;", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(int0) over (PARTITION BY `datetime1`)", null, 
"integer"), schema("min(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("avg(int0) over (PARTITION BY `datetime1`)", null, "double")); @@ -138,21 +171,31 @@ public void testInMemoryAggregationOnNullValuesReturnsNull() throws IOException @Test public void testInMemoryAggregationOnAllValuesAndOnNotNullReturnsSameResult() throws IOException { - var responseNotNulls = executeQuery(String.format("SELECT " - + " max(int0) over (PARTITION BY `datetime1`)," - + " min(int0) over (PARTITION BY `datetime1`)," - + " avg(int0) over (PARTITION BY `datetime1`)" - + "from %s where int0 IS NOT NULL;", TEST_INDEX_CALCS)); - var responseAllValues = executeQuery(String.format("SELECT " - + " max(int0) over (PARTITION BY `datetime1`)," - + " min(int0) over (PARTITION BY `datetime1`)," - + " avg(int0) over (PARTITION BY `datetime1`)" - + "from %s;", TEST_INDEX_CALCS)); - verifySchema(responseNotNulls, + var responseNotNulls = + executeQuery( + String.format( + "SELECT " + + " max(int0) over (PARTITION BY `datetime1`)," + + " min(int0) over (PARTITION BY `datetime1`)," + + " avg(int0) over (PARTITION BY `datetime1`)" + + "from %s where int0 IS NOT NULL;", + TEST_INDEX_CALCS)); + var responseAllValues = + executeQuery( + String.format( + "SELECT " + + " max(int0) over (PARTITION BY `datetime1`)," + + " min(int0) over (PARTITION BY `datetime1`)," + + " avg(int0) over (PARTITION BY `datetime1`)" + + "from %s;", + TEST_INDEX_CALCS)); + verifySchema( + responseNotNulls, schema("max(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("min(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("avg(int0) over (PARTITION BY `datetime1`)", null, "double")); - verifySchema(responseAllValues, + verifySchema( + responseAllValues, schema("max(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("min(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("avg(int0) over (PARTITION BY `datetime1`)", null, "double")); @@ -163,9 +206,13 @@ public void testInMemoryAggregationOnAllValuesAndOnNotNullReturnsSameResult() th @Test public void testPushDownAggregationOnNullNumericValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(int0), min(int0), avg(int0) from %s where int0 IS NULL;", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + "max(int0), min(int0), avg(int0) from %s where int0 IS NULL;", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(int0)", null, "integer"), schema("min(int0)", null, "integer"), schema("avg(int0)", null, "double")); @@ -174,9 +221,13 @@ public void testPushDownAggregationOnNullNumericValuesReturnsNull() throws IOExc @Test public void testPushDownAggregationOnNullDateTimeValuesFromTableReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(datetime1), min(datetime1), avg(datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + "max(datetime1), min(datetime1), avg(datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(datetime1)", null, "timestamp"), schema("min(datetime1)", null, "timestamp"), schema("avg(datetime1)", null, "timestamp")); @@ -185,9 +236,14 @@ public void testPushDownAggregationOnNullDateTimeValuesFromTableReturnsNull() th @Test public void testPushDownAggregationOnNullDateValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - 
+ "max(CAST(NULL AS date)), min(CAST(NULL AS date)), avg(CAST(NULL AS date)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT max(CAST(NULL AS date)), min(CAST(NULL AS date)), avg(CAST(NULL AS date))" + + " from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(CAST(NULL AS date))", null, "date"), schema("min(CAST(NULL AS date))", null, "date"), schema("avg(CAST(NULL AS date))", null, "date")); @@ -196,9 +252,14 @@ public void testPushDownAggregationOnNullDateValuesReturnsNull() throws IOExcept @Test public void testPushDownAggregationOnNullTimeValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(CAST(NULL AS time)), min(CAST(NULL AS time)), avg(CAST(NULL AS time)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT max(CAST(NULL AS time)), min(CAST(NULL AS time)), avg(CAST(NULL AS time))" + + " from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(CAST(NULL AS time))", null, "time"), schema("min(CAST(NULL AS time))", null, "time"), schema("avg(CAST(NULL AS time))", null, "time")); @@ -207,9 +268,14 @@ public void testPushDownAggregationOnNullTimeValuesReturnsNull() throws IOExcept @Test public void testPushDownAggregationOnNullTimeStampValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(CAST(NULL AS timestamp)), min(CAST(NULL AS timestamp)), avg(CAST(NULL AS timestamp)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT max(CAST(NULL AS timestamp)), min(CAST(NULL AS timestamp)), avg(CAST(NULL" + + " AS timestamp)) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(CAST(NULL AS timestamp))", null, "timestamp"), schema("min(CAST(NULL AS timestamp))", null, "timestamp"), schema("avg(CAST(NULL AS timestamp))", null, "timestamp")); @@ -218,9 +284,13 @@ public void testPushDownAggregationOnNullTimeStampValuesReturnsNull() throws IOE @Test public void testPushDownAggregationOnNullDateTimeValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(datetime(NULL)), min(datetime(NULL)), avg(datetime(NULL)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + "max(datetime(NULL)), min(datetime(NULL)), avg(datetime(NULL)) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(datetime(NULL))", null, "datetime"), schema("min(datetime(NULL))", null, "datetime"), schema("avg(datetime(NULL))", null, "datetime")); @@ -229,15 +299,22 @@ public void testPushDownAggregationOnNullDateTimeValuesReturnsNull() throws IOEx @Test public void testPushDownAggregationOnAllValuesAndOnNotNullReturnsSameResult() throws IOException { - var responseNotNulls = executeQuery(String.format("SELECT " - + "max(int0), min(int0), avg(int0) from %s where int0 IS NOT NULL;", TEST_INDEX_CALCS)); - var responseAllValues = executeQuery(String.format("SELECT " - + "max(int0), min(int0), avg(int0) from %s;", TEST_INDEX_CALCS)); - verifySchema(responseNotNulls, + var responseNotNulls = + executeQuery( + String.format( + "SELECT " + "max(int0), min(int0), avg(int0) from %s where int0 IS NOT NULL;", + TEST_INDEX_CALCS)); + var responseAllValues = + executeQuery( + String.format( + "SELECT " + "max(int0), min(int0), avg(int0) from %s;", TEST_INDEX_CALCS)); + 
verifySchema( + responseNotNulls, schema("max(int0)", null, "integer"), schema("min(int0)", null, "integer"), schema("avg(int0)", null, "double")); - verifySchema(responseAllValues, + verifySchema( + responseAllValues, schema("max(int0)", null, "integer"), schema("min(int0)", null, "integer"), schema("avg(int0)", null, "double")); @@ -248,18 +325,21 @@ public void testPushDownAggregationOnAllValuesAndOnNotNullReturnsSameResult() th @Test public void testPushDownAndInMemoryAggregationReturnTheSameResult() throws IOException { - // Playing with 'over (PARTITION BY `datetime1`)' - `datetime1` column has the same value for all rows + // Playing with 'over (PARTITION BY `datetime1`)' - `datetime1` column has the same value for + // all rows // so partitioning by this column has no sense and doesn't (shouldn't) affect the results // Aggregations with `OVER` clause are executed in memory (in SQL plugin memory), // Aggregations without it are performed the OpenSearch node itself (pushed down to opensearch) - // Going to compare results of `min`, `max` and `avg` aggregation on all numeric columns in `calcs` + // Going to compare results of `min`, `max` and `avg` aggregation on all numeric columns in + // `calcs` var columns = List.of("int0", "int1", "int2", "int3", "num0", "num1", "num2", "num3", "num4"); var aggregations = List.of("min", "max", "avg"); var inMemoryAggregQuery = new StringBuilder("SELECT "); var pushDownAggregQuery = new StringBuilder("SELECT "); for (var col : columns) { for (var aggreg : aggregations) { - inMemoryAggregQuery.append(String.format(" %s(%s) over (PARTITION BY `datetime1`),", aggreg, col)); + inMemoryAggregQuery.append( + String.format(" %s(%s) over (PARTITION BY `datetime1`),", aggreg, col)); pushDownAggregQuery.append(String.format(" %s(%s),", aggreg, col)); } } @@ -267,313 +347,362 @@ public void testPushDownAndInMemoryAggregationReturnTheSameResult() throws IOExc inMemoryAggregQuery.deleteCharAt(inMemoryAggregQuery.length() - 1); pushDownAggregQuery.deleteCharAt(pushDownAggregQuery.length() - 1); - var responseInMemory = executeQuery( - inMemoryAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); - var responsePushDown = executeQuery( - pushDownAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); + var responseInMemory = + executeQuery(inMemoryAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); + var responsePushDown = + executeQuery(pushDownAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); for (int i = 0; i < columns.size() * aggregations.size(); i++) { assertEquals( - ((Number)responseInMemory.query("/datarows/0/" + i)).doubleValue(), - ((Number)responsePushDown.query("/datarows/0/" + i)).doubleValue(), + ((Number) responseInMemory.query("/datarows/0/" + i)).doubleValue(), + ((Number) responsePushDown.query("/datarows/0/" + i)).doubleValue(), 0.0000001); // a minor delta is affordable } } public void testMinIntegerPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(int2)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT min(int2)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(int2)", null, "integer")); verifyDataRows(response, rows(-9)); } @Test public void testMaxIntegerPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(int2)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(int2)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, 
schema("max(int2)", null, "integer")); verifyDataRows(response, rows(9)); } @Test public void testAvgIntegerPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(int2)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(int2)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(int2)", null, "double")); verifyDataRows(response, rows(-0.8235294117647058D)); } @Test public void testMinDoublePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(num3)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT min(num3)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(num3)", null, "double")); verifyDataRows(response, rows(-19.96D)); } @Test public void testMaxDoublePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(num3)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(num3)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(num3)", null, "double")); verifyDataRows(response, rows(12.93D)); } @Test public void testAvgDoublePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(num3)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(num3)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(num3)", null, "double")); verifyDataRows(response, rows(-6.12D)); } @Test public void testMinIntegerInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(int2)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(int2) OVER(PARTITION BY datetime1)", null, "integer")); + var response = + executeQuery( + String.format( + "SELECT min(int2)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(int2) OVER(PARTITION BY datetime1)", null, "integer")); verifySome(response.getJSONArray("datarows"), rows(-9)); } @Test public void testMaxIntegerInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(int2)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(int2) OVER(PARTITION BY datetime1)", null, "integer")); + var response = + executeQuery( + String.format( + "SELECT max(int2)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(int2) OVER(PARTITION BY datetime1)", null, "integer")); verifySome(response.getJSONArray("datarows"), rows(9)); } @Test public void testAvgIntegerInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(int2)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(int2) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT avg(int2)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(int2) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(-0.8235294117647058D)); } @Test public void testMinDoubleInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(num3)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(num3) OVER(PARTITION BY datetime1)", null, 
"double")); + var response = + executeQuery( + String.format( + "SELECT min(num3)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(num3) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(-19.96D)); } @Test public void testMaxDoubleInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(num3)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(num3) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT max(num3)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(num3) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(12.93D)); } @Test public void testAvgDoubleInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(num3)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(num3) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT avg(num3)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(num3) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(-6.12D)); } @Test public void testMaxDatePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(date0)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(date0)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(date0)", null, "date")); verifyDataRows(response, rows("2004-06-19")); } @Test public void testAvgDatePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(date0)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(date0)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(date0)", null, "date")); verifyDataRows(response, rows("1992-04-23")); } @Test public void testMinDateTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(datetime(CAST(time0 AS STRING)))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT min(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(datetime(CAST(time0 AS STRING)))", null, "datetime")); verifyDataRows(response, rows("1899-12-30 21:07:32")); } @Test public void testMaxDateTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(datetime(CAST(time0 AS STRING)))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT max(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(datetime(CAST(time0 AS STRING)))", null, "datetime")); verifyDataRows(response, rows("1900-01-01 20:36:00")); } @Test public void testAvgDateTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(datetime(CAST(time0 AS STRING)))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT avg(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(datetime(CAST(time0 AS STRING)))", null, "datetime")); 
verifyDataRows(response, rows("1900-01-01 03:35:00.236")); } @Test public void testMinTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(time1)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT min(time1)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(time1)", null, "time")); verifyDataRows(response, rows("00:05:57")); } @Test public void testMaxTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(time1)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(time1)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(time1)", null, "time")); verifyDataRows(response, rows("22:50:16")); } @Test public void testAvgTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(time1)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(time1)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(time1)", null, "time")); verifyDataRows(response, rows("13:06:36.25")); } @Test public void testMinTimeStampPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(CAST(datetime0 AS timestamp))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT min(CAST(datetime0 AS timestamp))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(CAST(datetime0 AS timestamp))", null, "timestamp")); verifyDataRows(response, rows("2004-07-04 22:49:28")); } @Test public void testMaxTimeStampPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(CAST(datetime0 AS timestamp))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT max(CAST(datetime0 AS timestamp))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(CAST(datetime0 AS timestamp))", null, "timestamp")); verifyDataRows(response, rows("2004-08-02 07:59:23")); } @Test public void testAvgTimeStampPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(CAST(datetime0 AS timestamp))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT avg(CAST(datetime0 AS timestamp))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(CAST(datetime0 AS timestamp))", null, "timestamp")); verifyDataRows(response, rows("2004-07-20 10:38:09.705")); } @Test public void testMinDateInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(date0)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(date0) OVER(PARTITION BY datetime1)", null, "date")); + var response = + executeQuery( + String.format( + "SELECT min(date0)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(date0) OVER(PARTITION BY datetime1)", null, "date")); verifySome(response.getJSONArray("datarows"), rows("1972-07-04")); } @Test public void testMaxDateInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(date0)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(date0) OVER(PARTITION BY datetime1)", null, "date")); + var response = + executeQuery( + String.format( + "SELECT max(date0)" + " OVER(PARTITION BY datetime1) from %s", 
TEST_INDEX_CALCS)); + verifySchema(response, schema("max(date0) OVER(PARTITION BY datetime1)", null, "date")); verifySome(response.getJSONArray("datarows"), rows("2004-06-19")); } @Test public void testAvgDateInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(date0)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(date0) OVER(PARTITION BY datetime1)", null, "date")); + var response = + executeQuery( + String.format( + "SELECT avg(date0)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(date0) OVER(PARTITION BY datetime1)", null, "date")); verifySome(response.getJSONArray("datarows"), rows("1992-04-23")); } @Test public void testMinDateTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(datetime(CAST(time0 AS STRING)))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + var response = + executeQuery( + String.format( + "SELECT min(datetime(CAST(time0 AS STRING)))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "min(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); verifySome(response.getJSONArray("datarows"), rows("1899-12-30 21:07:32")); } @Test public void testMaxDateTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(datetime(CAST(time0 AS STRING)))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + var response = + executeQuery( + String.format( + "SELECT max(datetime(CAST(time0 AS STRING)))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "max(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); verifySome(response.getJSONArray("datarows"), rows("1900-01-01 20:36:00")); } @Test public void testAvgDateTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(datetime(CAST(time0 AS STRING)))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + var response = + executeQuery( + String.format( + "SELECT avg(datetime(CAST(time0 AS STRING)))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "avg(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); verifySome(response.getJSONArray("datarows"), rows("1900-01-01 03:35:00.236")); } @Test public void testMinTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(time1)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(time1) OVER(PARTITION BY datetime1)", null, "time")); + var response = + executeQuery( + String.format( + "SELECT min(time1)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(time1) OVER(PARTITION BY datetime1)", null, "time")); verifySome(response.getJSONArray("datarows"), rows("00:05:57")); } @Test public void testMaxTimeInMemory() throws IOException { - var 
response = executeQuery(String.format("SELECT max(time1)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(time1) OVER(PARTITION BY datetime1)", null, "time")); + var response = + executeQuery( + String.format( + "SELECT max(time1)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(time1) OVER(PARTITION BY datetime1)", null, "time")); verifySome(response.getJSONArray("datarows"), rows("22:50:16")); } @Test public void testAvgTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(time1)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(time1) OVER(PARTITION BY datetime1)", null, "time")); + var response = + executeQuery( + String.format( + "SELECT avg(time1)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(time1) OVER(PARTITION BY datetime1)", null, "time")); verifySome(response.getJSONArray("datarows"), rows("13:06:36.25")); } @Test public void testMinTimeStampInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(CAST(datetime0 AS timestamp))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); + var response = + executeQuery( + String.format( + "SELECT min(CAST(datetime0 AS timestamp))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "min(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); verifySome(response.getJSONArray("datarows"), rows("2004-07-04 22:49:28")); } @Test public void testMaxTimeStampInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(CAST(datetime0 AS timestamp))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); + var response = + executeQuery( + String.format( + "SELECT max(CAST(datetime0 AS timestamp))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "max(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); verifySome(response.getJSONArray("datarows"), rows("2004-08-02 07:59:23")); } @Test public void testAvgTimeStampInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(CAST(datetime0 AS timestamp))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); + var response = + executeQuery( + String.format( + "SELECT avg(CAST(datetime0 AS timestamp))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "avg(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); verifySome(response.getJSONArray("datarows"), rows("2004-07-20 10:38:09.705")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java index dd99cf2e75..7c91c42197 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; @@ -24,222 +23,223 @@ public class ArithmeticFunctionIT extends SQLIntegTestCase { - @Override - public void init() throws Exception { - super.init(); - loadIndex(Index.BANK); - } - - public void testAdd() throws IOException { - JSONObject result = executeQuery("select 3 + 2"); - verifySchema(result, schema("3 + 2", null, "integer")); - verifyDataRows(result, rows(3 + 2)); - - result = executeQuery("select 2.5 + 2"); - verifySchema(result, schema("2.5 + 2", null, "double")); - verifyDataRows(result, rows(2.5D + 2)); - - result = executeQuery("select 3000000000 + 2"); - verifySchema(result, schema("3000000000 + 2", null, "long")); - verifyDataRows(result, rows(3000000000L + 2)); - - result = executeQuery("select CAST(6.666666 AS FLOAT) + 2"); - verifySchema(result, schema("CAST(6.666666 AS FLOAT) + 2", null, "float")); - verifyDataRows(result, rows(6.666666 + 2)); - } - - @Test - public void testAddFunction() throws IOException { - JSONObject result = executeQuery("select add(3, 2)"); - verifySchema(result, schema("add(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 + 2)); - - result = executeQuery("select add(2.5, 2)"); - verifySchema(result, schema("add(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D + 2)); - - result = executeQuery("select add(3000000000, 2)"); - verifySchema(result, schema("add(3000000000, 2)", null, "long")); - verifyDataRows(result, rows(3000000000L + 2)); - - result = executeQuery("select add(CAST(6.666666 AS FLOAT), 2)"); - verifySchema(result, schema("add(CAST(6.666666 AS FLOAT), 2)", null, "float")); - verifyDataRows(result, rows(6.666666 + 2)); - } - - public void testDivide() throws IOException { - JSONObject result = executeQuery("select 3 / 2"); - verifySchema(result, schema("3 / 2", null, "integer")); - verifyDataRows(result, rows(3 / 2)); - - result = executeQuery("select 2.5 / 2"); - verifySchema(result, schema("2.5 / 2", null, "double")); - verifyDataRows(result, rows(2.5D / 2)); - - result = executeQuery("select 6000000000 / 2"); - verifySchema(result, schema("6000000000 / 2", null, "long")); - verifyDataRows(result, rows(6000000000L / 2)); - - result = executeQuery("select cast(1.6 AS float) / 2"); - verifySchema(result, schema("cast(1.6 AS float) / 2", null, "float")); - verifyDataRows(result, rows(1.6 / 2)); - } - - public void testDivideFunction() throws IOException { - JSONObject result = executeQuery("select divide(3, 2)"); - verifySchema(result, schema("divide(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 / 2)); - - result = executeQuery("select divide(2.5, 2)"); - verifySchema(result, schema("divide(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D / 2)); - - result = executeQuery("select divide(6000000000, 2)"); - verifySchema(result, schema("divide(6000000000, 2)", null, "long")); - verifyDataRows(result, rows(6000000000L / 2)); - - result = executeQuery("select divide(cast(1.6 AS float), 2)"); - verifySchema(result, schema("divide(cast(1.6 AS float), 2)", null, "float")); - verifyDataRows(result, rows(1.6 / 2)); - } - - public void testMod() throws IOException { - JSONObject result = executeQuery("select mod(3, 2)"); - verifySchema(result, schema("mod(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 % 2)); - - result = executeQuery("select 
mod(2.5, 2)"); - verifySchema(result, schema("mod(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D % 2)); - - result = executeQuery("select mod(cast(300001 as long), 2)"); - verifySchema(result, schema("mod(cast(300001 as long), 2)", null, "long")); - verifyDataRows(result, rows(3000001 % 2)); - - result = executeQuery("select mod(cast(1.6 AS float), 2)"); - verifySchema(result, schema("mod(cast(1.6 AS float), 2)", null, "float")); - verifyDataRows(result, rows(1.6 % 2)); - } - - public void testModulus() throws IOException { - JSONObject result = executeQuery("select 3 % 2"); - verifySchema(result, schema("3 % 2", null, "integer")); - verifyDataRows(result, rows(3 % 2)); - - result = executeQuery("select 2.5 % 2"); - verifySchema(result, schema("2.5 % 2", null, "double")); - verifyDataRows(result, rows(2.5D % 2)); - - result = executeQuery("select cast(300001 as long) % 2"); - verifySchema(result, schema("cast(300001 as long) % 2", null, "long")); - verifyDataRows(result, rows(300001 % 2)); - - result = executeQuery("select cast(1.6 AS float) % 2"); - verifySchema(result, schema("cast(1.6 AS float) % 2", null, "float")); - verifyDataRows(result, rows(1.6 % 2)); - } - - public void testModulusFunction() throws IOException { - JSONObject result = executeQuery("select modulus(3, 2)"); - verifySchema(result, schema("modulus(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 % 2)); - - result = executeQuery("select modulus(2.5, 2)"); - verifySchema(result, schema("modulus(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D % 2)); - - result = executeQuery("select modulus(cast(300001 as long), 2)"); - verifySchema(result, schema("modulus(cast(300001 as long), 2)", null, "long")); - verifyDataRows(result, rows(300001 % 2)); - - result = executeQuery("select modulus(cast(1.6 AS float), 2)"); - verifySchema(result, schema("modulus(cast(1.6 AS float), 2)", null, "float")); - verifyDataRows(result, rows(1.6 % 2)); - } - - public void testMultiply() throws IOException { - JSONObject result = executeQuery("select 3 * 2"); - verifySchema(result, schema("3 * 2", null, "integer")); - verifyDataRows(result, rows(3 * 2)); - - result = executeQuery("select 2.5 * 2"); - verifySchema(result, schema("2.5 * 2", null, "double")); - verifyDataRows(result, rows(2.5D * 2)); - - result = executeQuery("select 3000000000 * 2"); - verifySchema(result, schema("3000000000 * 2", null, "long")); - verifyDataRows(result, rows(3000000000L * 2)); - - result = executeQuery("select CAST(1.6 AS FLOAT) * 2"); - verifySchema(result, schema("CAST(1.6 AS FLOAT) * 2", null, "float")); - verifyDataRows(result, rows(1.6 * 2)); - } - - @Test - public void testMultiplyFunction() throws IOException { - JSONObject result = executeQuery("select multiply(3, 2)"); - verifySchema(result, schema("multiply(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 * 2)); - - result = executeQuery("select multiply(2.5, 2)"); - verifySchema(result, schema("multiply(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D * 2)); - - result = executeQuery("select multiply(3000000000, 2)"); - verifySchema(result, schema("multiply(3000000000, 2)", null, "long")); - verifyDataRows(result, rows(3000000000L * 2)); - - result = executeQuery("select multiply(CAST(1.6 AS FLOAT), 2)"); - verifySchema(result, schema("multiply(CAST(1.6 AS FLOAT), 2)", null, "float")); - verifyDataRows(result, rows(1.6 * 2)); - } - - public void testSubtract() throws IOException { - JSONObject result = executeQuery("select 3 - 2"); - 
verifySchema(result, schema("3 - 2", null, "integer")); - verifyDataRows(result, rows(3 - 2)); - - result = executeQuery("select 2.5 - 2"); - verifySchema(result, schema("2.5 - 2", null, "double")); - verifyDataRows(result, rows(2.5D - 2)); - - result = executeQuery("select 3000000000 - 2"); - verifySchema(result, schema("3000000000 - 2", null, "long")); - verifyDataRows(result, rows(3000000000L - 2)); - - result = executeQuery("select CAST(6.666666 AS FLOAT) - 2"); - verifySchema(result, schema("CAST(6.666666 AS FLOAT) - 2", null, "float")); - verifyDataRows(result, rows(6.666666 - 2)); - } - - @Test - public void testSubtractFunction() throws IOException { - JSONObject result = executeQuery("select subtract(3, 2)"); - verifySchema(result, schema("subtract(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 - 2)); - - result = executeQuery("select subtract(2.5, 2)"); - verifySchema(result, schema("subtract(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D - 2)); - - result = executeQuery("select subtract(3000000000, 2)"); - verifySchema(result, schema("subtract(3000000000, 2)", null, "long")); - verifyDataRows(result, rows(3000000000L - 2)); - - result = executeQuery("select cast(subtract(cast(6.666666 as float), 2) as float)"); - verifySchema(result, schema("cast(subtract(cast(6.666666 as float), 2) as float)", null, "float")); - verifyDataRows(result, rows(6.666666 - 2)); - } - - protected JSONObject executeQuery(String query) throws IOException { - Request request = new Request("POST", QUERY_API_ENDPOINT); - request.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query)); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - return new JSONObject(getResponseBody(response)); - } + @Override + public void init() throws Exception { + super.init(); + loadIndex(Index.BANK); + } + + public void testAdd() throws IOException { + JSONObject result = executeQuery("select 3 + 2"); + verifySchema(result, schema("3 + 2", null, "integer")); + verifyDataRows(result, rows(3 + 2)); + + result = executeQuery("select 2.5 + 2"); + verifySchema(result, schema("2.5 + 2", null, "double")); + verifyDataRows(result, rows(2.5D + 2)); + + result = executeQuery("select 3000000000 + 2"); + verifySchema(result, schema("3000000000 + 2", null, "long")); + verifyDataRows(result, rows(3000000000L + 2)); + + result = executeQuery("select CAST(6.666666 AS FLOAT) + 2"); + verifySchema(result, schema("CAST(6.666666 AS FLOAT) + 2", null, "float")); + verifyDataRows(result, rows(6.666666 + 2)); + } + + @Test + public void testAddFunction() throws IOException { + JSONObject result = executeQuery("select add(3, 2)"); + verifySchema(result, schema("add(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 + 2)); + + result = executeQuery("select add(2.5, 2)"); + verifySchema(result, schema("add(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D + 2)); + + result = executeQuery("select add(3000000000, 2)"); + verifySchema(result, schema("add(3000000000, 2)", null, "long")); + verifyDataRows(result, rows(3000000000L + 2)); + + result = executeQuery("select add(CAST(6.666666 AS FLOAT), 2)"); + verifySchema(result, schema("add(CAST(6.666666 AS FLOAT), 2)", null, "float")); + verifyDataRows(result, rows(6.666666 + 2)); + } + + public void testDivide() throws IOException 
{ + JSONObject result = executeQuery("select 3 / 2"); + verifySchema(result, schema("3 / 2", null, "integer")); + verifyDataRows(result, rows(3 / 2)); + + result = executeQuery("select 2.5 / 2"); + verifySchema(result, schema("2.5 / 2", null, "double")); + verifyDataRows(result, rows(2.5D / 2)); + + result = executeQuery("select 6000000000 / 2"); + verifySchema(result, schema("6000000000 / 2", null, "long")); + verifyDataRows(result, rows(6000000000L / 2)); + + result = executeQuery("select cast(1.6 AS float) / 2"); + verifySchema(result, schema("cast(1.6 AS float) / 2", null, "float")); + verifyDataRows(result, rows(1.6 / 2)); + } + + public void testDivideFunction() throws IOException { + JSONObject result = executeQuery("select divide(3, 2)"); + verifySchema(result, schema("divide(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 / 2)); + + result = executeQuery("select divide(2.5, 2)"); + verifySchema(result, schema("divide(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D / 2)); + + result = executeQuery("select divide(6000000000, 2)"); + verifySchema(result, schema("divide(6000000000, 2)", null, "long")); + verifyDataRows(result, rows(6000000000L / 2)); + + result = executeQuery("select divide(cast(1.6 AS float), 2)"); + verifySchema(result, schema("divide(cast(1.6 AS float), 2)", null, "float")); + verifyDataRows(result, rows(1.6 / 2)); + } + + public void testMod() throws IOException { + JSONObject result = executeQuery("select mod(3, 2)"); + verifySchema(result, schema("mod(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 % 2)); + + result = executeQuery("select mod(2.5, 2)"); + verifySchema(result, schema("mod(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D % 2)); + + result = executeQuery("select mod(cast(300001 as long), 2)"); + verifySchema(result, schema("mod(cast(300001 as long), 2)", null, "long")); + verifyDataRows(result, rows(3000001 % 2)); + + result = executeQuery("select mod(cast(1.6 AS float), 2)"); + verifySchema(result, schema("mod(cast(1.6 AS float), 2)", null, "float")); + verifyDataRows(result, rows(1.6 % 2)); + } + + public void testModulus() throws IOException { + JSONObject result = executeQuery("select 3 % 2"); + verifySchema(result, schema("3 % 2", null, "integer")); + verifyDataRows(result, rows(3 % 2)); + + result = executeQuery("select 2.5 % 2"); + verifySchema(result, schema("2.5 % 2", null, "double")); + verifyDataRows(result, rows(2.5D % 2)); + + result = executeQuery("select cast(300001 as long) % 2"); + verifySchema(result, schema("cast(300001 as long) % 2", null, "long")); + verifyDataRows(result, rows(300001 % 2)); + + result = executeQuery("select cast(1.6 AS float) % 2"); + verifySchema(result, schema("cast(1.6 AS float) % 2", null, "float")); + verifyDataRows(result, rows(1.6 % 2)); + } + + public void testModulusFunction() throws IOException { + JSONObject result = executeQuery("select modulus(3, 2)"); + verifySchema(result, schema("modulus(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 % 2)); + + result = executeQuery("select modulus(2.5, 2)"); + verifySchema(result, schema("modulus(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D % 2)); + + result = executeQuery("select modulus(cast(300001 as long), 2)"); + verifySchema(result, schema("modulus(cast(300001 as long), 2)", null, "long")); + verifyDataRows(result, rows(300001 % 2)); + + result = executeQuery("select modulus(cast(1.6 AS float), 2)"); + verifySchema(result, schema("modulus(cast(1.6 AS float), 2)", null, 
"float")); + verifyDataRows(result, rows(1.6 % 2)); + } + + public void testMultiply() throws IOException { + JSONObject result = executeQuery("select 3 * 2"); + verifySchema(result, schema("3 * 2", null, "integer")); + verifyDataRows(result, rows(3 * 2)); + + result = executeQuery("select 2.5 * 2"); + verifySchema(result, schema("2.5 * 2", null, "double")); + verifyDataRows(result, rows(2.5D * 2)); + + result = executeQuery("select 3000000000 * 2"); + verifySchema(result, schema("3000000000 * 2", null, "long")); + verifyDataRows(result, rows(3000000000L * 2)); + + result = executeQuery("select CAST(1.6 AS FLOAT) * 2"); + verifySchema(result, schema("CAST(1.6 AS FLOAT) * 2", null, "float")); + verifyDataRows(result, rows(1.6 * 2)); + } + + @Test + public void testMultiplyFunction() throws IOException { + JSONObject result = executeQuery("select multiply(3, 2)"); + verifySchema(result, schema("multiply(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 * 2)); + + result = executeQuery("select multiply(2.5, 2)"); + verifySchema(result, schema("multiply(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D * 2)); + + result = executeQuery("select multiply(3000000000, 2)"); + verifySchema(result, schema("multiply(3000000000, 2)", null, "long")); + verifyDataRows(result, rows(3000000000L * 2)); + + result = executeQuery("select multiply(CAST(1.6 AS FLOAT), 2)"); + verifySchema(result, schema("multiply(CAST(1.6 AS FLOAT), 2)", null, "float")); + verifyDataRows(result, rows(1.6 * 2)); + } + + public void testSubtract() throws IOException { + JSONObject result = executeQuery("select 3 - 2"); + verifySchema(result, schema("3 - 2", null, "integer")); + verifyDataRows(result, rows(3 - 2)); + + result = executeQuery("select 2.5 - 2"); + verifySchema(result, schema("2.5 - 2", null, "double")); + verifyDataRows(result, rows(2.5D - 2)); + + result = executeQuery("select 3000000000 - 2"); + verifySchema(result, schema("3000000000 - 2", null, "long")); + verifyDataRows(result, rows(3000000000L - 2)); + + result = executeQuery("select CAST(6.666666 AS FLOAT) - 2"); + verifySchema(result, schema("CAST(6.666666 AS FLOAT) - 2", null, "float")); + verifyDataRows(result, rows(6.666666 - 2)); + } + + @Test + public void testSubtractFunction() throws IOException { + JSONObject result = executeQuery("select subtract(3, 2)"); + verifySchema(result, schema("subtract(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 - 2)); + + result = executeQuery("select subtract(2.5, 2)"); + verifySchema(result, schema("subtract(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D - 2)); + + result = executeQuery("select subtract(3000000000, 2)"); + verifySchema(result, schema("subtract(3000000000, 2)", null, "long")); + verifyDataRows(result, rows(3000000000L - 2)); + + result = executeQuery("select cast(subtract(cast(6.666666 as float), 2) as float)"); + verifySchema( + result, schema("cast(subtract(cast(6.666666 as float), 2) as float)", null, "float")); + verifyDataRows(result, rows(6.666666 - 2)); + } + + protected JSONObject executeQuery(String query) throws IOException { + Request request = new Request("POST", QUERY_API_ENDPOINT); + request.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query)); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + return new 
JSONObject(getResponseBody(response)); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java index ab0900784d..deb41653e2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.equalTo; @@ -42,9 +41,11 @@ public void init() throws Exception { @Test public void ifnullShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT IFNULL(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT - + " GROUP BY name"); + JSONObject response = + executeJdbcRequest( + "SELECT IFNULL(lastname, 'unknown') AS name FROM " + + TEST_INDEX_ACCOUNT + + " GROUP BY name"); assertEquals("IFNULL(lastname, 'unknown')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -52,87 +53,95 @@ public void ifnullShouldPassJDBC() throws IOException { @Test public void ifnullWithNullInputTest() { - JSONObject response = new JSONObject(executeQuery( - "SELECT IFNULL(null, firstname) as IFNULL1 ," + JSONObject response = + new JSONObject( + executeQuery( + "SELECT IFNULL(null, firstname) as IFNULL1 ," + " IFNULL(firstname, null) as IFNULL2 ," + " IFNULL(null, null) as IFNULL3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 2", "jdbc")); - - verifySchema(response, - schema("IFNULL(null, firstname)", "IFNULL1", "keyword"), - schema("IFNULL(firstname, null)", "IFNULL2", "keyword"), - schema("IFNULL(null, null)", "IFNULL3", "byte")); - verifyDataRows(response, - rows("Hattie", "Hattie", LITERAL_NULL.value()), - rows( "Elinor", "Elinor", LITERAL_NULL.value()) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 2", + "jdbc")); + + verifySchema( + response, + schema("IFNULL(null, firstname)", "IFNULL1", "keyword"), + schema("IFNULL(firstname, null)", "IFNULL2", "keyword"), + schema("IFNULL(null, null)", "IFNULL3", "byte")); + verifyDataRows( + response, + rows("Hattie", "Hattie", LITERAL_NULL.value()), + rows("Elinor", "Elinor", LITERAL_NULL.value())); } @Test public void ifnullWithMissingInputTest() { - JSONObject response = new JSONObject(executeQuery( - "SELECT IFNULL(balance, 100) as IFNULL1, " + JSONObject response = + new JSONObject( + executeQuery( + "SELECT IFNULL(balance, 100) as IFNULL1, " + " IFNULL(200, balance) as IFNULL2, " + " IFNULL(balance, balance) as IFNULL3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 3", "jdbc")); - verifySchema(response, - schema("IFNULL(balance, 100)", "IFNULL1", "long"), - schema("IFNULL(200, balance)", "IFNULL2", "long"), - schema("IFNULL(balance, balance)", "IFNULL3", "long")); - verifyDataRows(response, - rows(100, 200, null), - rows(100, 200, null), - rows(100, 200, null) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 3", + "jdbc")); + verifySchema( + response, + schema("IFNULL(balance, 100)", "IFNULL1", "long"), + schema("IFNULL(200, balance)", "IFNULL2", "long"), + schema("IFNULL(balance, balance)", "IFNULL3", "long")); + verifyDataRows(response, rows(100, 200, null), rows(100, 200, null), rows(100, 200, null)); } @Test public void nullifShouldPassJDBC() throws IOException { - 
JSONObject response = executeJdbcRequest( - "SELECT NULLIF(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT); + JSONObject response = + executeJdbcRequest("SELECT NULLIF(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("NULLIF(lastname, 'unknown')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); } @Test - public void nullifWithNotNullInputTestOne(){ - JSONObject response = new JSONObject(executeQuery( - "SELECT NULLIF(firstname, 'Amber JOHnny') as testnullif " - + "FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " limit 2 ", "jdbc")); - verifySchema(response, - schema("NULLIF(firstname, 'Amber JOHnny')", "testnullif", "keyword")); - verifyDataRows(response, - rows(LITERAL_NULL.value()), - rows("Hattie") - ); + public void nullifWithNotNullInputTestOne() { + JSONObject response = + new JSONObject( + executeQuery( + "SELECT NULLIF(firstname, 'Amber JOHnny') as testnullif " + + "FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " limit 2 ", + "jdbc")); + verifySchema(response, schema("NULLIF(firstname, 'Amber JOHnny')", "testnullif", "keyword")); + verifyDataRows(response, rows(LITERAL_NULL.value()), rows("Hattie")); } @Test public void nullifWithNullInputTest() { - JSONObject response = new JSONObject(executeQuery( - "SELECT NULLIF(1/0, 123) as nullif1 ," + JSONObject response = + new JSONObject( + executeQuery( + "SELECT NULLIF(1/0, 123) as nullif1 ," + " NULLIF(123, 1/0) as nullif2 ," + " NULLIF(1/0, 1/0) as nullif3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 1", "jdbc")); - verifySchema(response, - schema("NULLIF(1/0, 123)", "nullif1", "integer"), - schema("NULLIF(123, 1/0)", "nullif2", "integer"), - schema("NULLIF(1/0, 1/0)", "nullif3", "integer")); - verifyDataRows(response, - rows(LITERAL_NULL.value(), 123, LITERAL_NULL.value() - ) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 1", + "jdbc")); + verifySchema( + response, + schema("NULLIF(1/0, 123)", "nullif1", "integer"), + schema("NULLIF(123, 1/0)", "nullif2", "integer"), + schema("NULLIF(1/0, 1/0)", "nullif3", "integer")); + verifyDataRows(response, rows(LITERAL_NULL.value(), 123, LITERAL_NULL.value())); } @Test public void isnullShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); + JSONObject response = + executeJdbcRequest("SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("ISNULL(lastname)", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("boolean", response.query("/schema/0/type")); @@ -141,47 +150,48 @@ public void isnullShouldPassJDBC() throws IOException { @Test public void isnullWithNotNullInputTest() throws IOException { assertThat( - executeQuery("SELECT ISNULL('elastic') AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + executeQuery("SELECT ISNULL('elastic') AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( - executeQuery("SELECT ISNULL('') AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + executeQuery("SELECT ISNULL('') AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); } @Test public void isnullWithNullInputTest() { - JSONObject response = new 
JSONObject(executeQuery( - "SELECT ISNULL(1/0) as ISNULL1 ," + JSONObject response = + new JSONObject( + executeQuery( + "SELECT ISNULL(1/0) as ISNULL1 ," + " ISNULL(firstname) as ISNULL2 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 2", "jdbc")); - verifySchema(response, - schema("ISNULL(1/0)", "ISNULL1", "boolean"), - schema("ISNULL(firstname)", "ISNULL2", "boolean")); - verifyDataRows(response, - rows(LITERAL_TRUE.value(), LITERAL_FALSE.value()), - rows(LITERAL_TRUE.value(), LITERAL_FALSE.value()) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 2", + "jdbc")); + verifySchema( + response, + schema("ISNULL(1/0)", "ISNULL1", "boolean"), + schema("ISNULL(firstname)", "ISNULL2", "boolean")); + verifyDataRows( + response, + rows(LITERAL_TRUE.value(), LITERAL_FALSE.value()), + rows(LITERAL_TRUE.value(), LITERAL_FALSE.value())); } @Test - public void isnullWithMathExpr() throws IOException{ + public void isnullWithMathExpr() throws IOException { assertThat( - executeQuery("SELECT ISNULL(1+1) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + executeQuery("SELECT ISNULL(1+1) AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( - executeQuery("SELECT ISNULL(1+1*1/0) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(1))) - ); + executeQuery("SELECT ISNULL(1+1*1/0) AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(1)))); } @Test public void ifShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT IF(2 > 0, 'hello', 'world') AS name FROM " + TEST_INDEX_ACCOUNT); + JSONObject response = + executeJdbcRequest("SELECT IF(2 > 0, 'hello', 'world') AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("IF(2 > 0, 'hello', 'world')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -189,33 +199,37 @@ public void ifShouldPassJDBC() throws IOException { @Test public void ifWithTrueAndFalseCondition() throws IOException { - JSONObject response = new JSONObject(executeQuery( - "SELECT IF(2 < 0, firstname, lastname) as IF0, " - + " IF(2 > 0, firstname, lastname) as IF1, " - + " firstname as IF2, " - + " lastname as IF3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " limit 2 ", "jdbc" )); - verifySchema(response, - schema("IF(2 < 0, firstname, lastname)", "IF0", "keyword"), - schema("IF(2 > 0, firstname, lastname)", "IF1", "keyword"), - schema("firstname", "IF2", "text"), - schema("lastname", "IF3", "keyword") - ); - verifyDataRows(response, - rows("Duke Willmington", "Amber JOHnny", "Amber JOHnny", "Duke Willmington"), - rows("Bond", "Hattie", "Hattie", "Bond") - ); - + JSONObject response = + new JSONObject( + executeQuery( + "SELECT IF(2 < 0, firstname, lastname) as IF0, " + + " IF(2 > 0, firstname, lastname) as IF1, " + + " firstname as IF2, " + + " lastname as IF3 " + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " limit 2 ", + "jdbc")); + verifySchema( + response, + schema("IF(2 < 0, firstname, lastname)", "IF0", "keyword"), + schema("IF(2 > 0, firstname, lastname)", "IF1", "keyword"), + schema("firstname", "IF2", "text"), + schema("lastname", "IF3", "keyword")); + verifyDataRows( + response, + rows("Duke Willmington", "Amber JOHnny", "Amber JOHnny", "Duke Willmington"), + rows("Bond", "Hattie", "Hattie", "Bond")); } private 
SearchHits query(String query) throws IOException { final String rsp = executeQueryWithStringOutput(query); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(rsp)); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(rsp)); return SearchResponse.fromXContent(parser).getHits(); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java index b719edd5b0..76600b6561 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java @@ -1,7 +1,7 @@ - /* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.sql.sql; @@ -16,110 +16,97 @@ public class ConvertTZFunctionIT extends SQLIntegTestCase { - @Override public void init() throws Exception { super.init(); loadIndex(Index.BANK); } - @Test public void inRangeZeroToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2008-05-15 12:00:00','+00:00','+10:00')"); - verifySchema(result, - schema("convert_tz('2008-05-15 12:00:00','+00:00','+10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2008-05-15 12:00:00','+00:00','+10:00')"); + verifySchema( + result, schema("convert_tz('2008-05-15 12:00:00','+00:00','+10:00')", null, "datetime")); verifyDataRows(result, rows("2008-05-15 22:00:00")); } @Test public void inRangeNegativeZeroToPositiveZero() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 00:00:00','-00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 00:00:00','-00:00','+00:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 00:00:00','-00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 00:00:00','-00:00','+00:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 00:00:00")); } @Test public void inRangePositiveToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 00:00:00','+10:00','+11:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 00:00:00','+10:00','+11:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 00:00:00','+10:00','+11:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 00:00:00','+10:00','+11:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 01:00:00")); } @Test public void inRangeNegativeToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','-08:00','+09:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','-08:00','+09:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-08:00','+09:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','-08:00','+09:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-13 04:34:50")); } @Test public void inRangeSameTimeZone() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','+09:00','+09:00')"); - 
verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','+09:00','+09:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+09:00','+09:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','+09:00','+09:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 11:34:50")); } @Test public void inRangeTwentyFourHourTimeOffset() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','-12:00','+12:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','-12:00','+12:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-12:00','+12:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+12:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-13 11:34:50")); } @Test public void inRangeFifteenMinuteTimeZones() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 13:00:00','+09:30','+05:45')"); - verifySchema(result, - schema("convert_tz('2021-05-12 13:00:00','+09:30','+05:45')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 13:00:00','+09:30','+05:45')"); + verifySchema( + result, schema("convert_tz('2021-05-12 13:00:00','+09:30','+05:45')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 09:15:00")); } @Test public void inRangeRandomTimes() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 13:00:00','+09:31','+05:11')"); - verifySchema(result, - schema("convert_tz('2021-05-12 13:00:00','+09:31','+05:11')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 13:00:00','+09:31','+05:11')"); + verifySchema( + result, schema("convert_tz('2021-05-12 13:00:00','+09:31','+05:11')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 08:40:00")); } @Test public void nullField2Under() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-30 11:34:50','-14:00','+08:00')"); - verifySchema(result, - schema("convert_tz('2021-05-30 11:34:50','-14:00','+08:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-30 11:34:50','-14:00','+08:00')"); + verifySchema( + result, schema("convert_tz('2021-05-30 11:34:50','-14:00','+08:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullField3Over() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','-12:00','+14:01')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','-12:00','+14:01')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-12:00','+14:01')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+14:01')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void inRangeMinOnPoint() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 15:00:00','-13:59','-13:59')"); - verifySchema(result, - schema("convert_tz('2021-05-12 15:00:00','-13:59','-13:59')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 15:00:00','-13:59','-13:59')"); + verifySchema( + result, schema("convert_tz('2021-05-12 
15:00:00','-13:59','-13:59')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 15:00:00")); } @@ -129,57 +116,50 @@ public void inRangeMinOnPoint() throws IOException { // Invalid input returns null. @Test public void nullField3InvalidInput() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','+10:0','+14:01')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','+10:0','+14:01')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+10:0','+14:01')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','+10:0','+14:01')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullField2InvalidInput() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','+14:01','****')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','+14:01','****')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+14:01','****')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','+14:01','****')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } // Invalid input in the datetime field of CONVERT_TZ results in a null field. It is any input // which is not of the format `yyyy-MM-dd HH:mm:ss` @Test public void nullDateTimeInvalidInput() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021----','+00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021----','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021----','+00:00','+00:00')"); + verifySchema(result, schema("convert_tz('2021----','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-02-30 10:00:00','+00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021-02-30 10:00:00','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-02-30 10:00:00','+00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-02-30 10:00:00','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-04-31 10:00:00','+00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021-04-31 10:00:00','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-04-31 10:00:00','+00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-04-31 10:00:00','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueMonth() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-13-03 10:00:00','+00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021-13-03 10:00:00','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, 
rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-13-03 10:00:00','+00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-13-03 10:00:00','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java b/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java index cd5765e0ce..33c9c0687f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static java.util.Collections.emptyMap; @@ -25,15 +24,13 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * SQL integration test base class. This is very similar to CorrectnessIT though - * enforce the success of all tests rather than report failures only. + * SQL integration test base class. This is very similar to CorrectnessIT though enforce the success + * of all tests rather than report failures only. */ @ThreadLeakScope(ThreadLeakScope.Scope.NONE) public abstract class CorrectnessTestBase extends RestIntegTestCase { - /** - * Comparison test runner shared by all methods in this IT class. - */ + /** Comparison test runner shared by all methods in this IT class. */ private static ComparisonTest runner; @Override @@ -43,8 +40,7 @@ protected void init() throws Exception { } TestConfig config = new TestConfig(emptyMap()); - runner = new ComparisonTest(getOpenSearchConnection(), - getOtherDBConnections(config)); + runner = new ComparisonTest(getOpenSearchConnection(), getOtherDBConnections(config)); runner.connect(); for (TestDataSet dataSet : config.getTestDataSets()) { @@ -52,9 +48,7 @@ protected void init() throws Exception { } } - /** - * Clean up test data and close other database connection. - */ + /** Clean up test data and close other database connection. */ @AfterClass public static void cleanUp() { if (runner == null) { @@ -74,33 +68,29 @@ public static void cleanUp() { } /** - * Execute the given queries and compare result with other database. - * The queries will be considered as one test batch. + * Execute the given queries and compare result with other database. The queries will be + * considered as one test batch. */ protected void verify(String... queries) { TestReport result = runner.verify(new TestQuerySet(queries)); TestSummary summary = result.getSummary(); - Assert.assertEquals(StringUtils.format( - "Comparison test failed on queries: %s", new JSONObject(result).toString(2)), - 0, summary.getFailure()); + Assert.assertEquals( + StringUtils.format( + "Comparison test failed on queries: %s", new JSONObject(result).toString(2)), + 0, + summary.getFailure()); } - /** - * Use OpenSearch cluster initialized by OpenSearch Gradle task. - */ + /** Use OpenSearch cluster initialized by OpenSearch Gradle task. */ private DBConnection getOpenSearchConnection() { String openSearchHost = client().getNodes().get(0).getHost().toString(); return new OpenSearchConnection("jdbc:opensearch://" + openSearchHost, client()); } - /** - * Create database connection with database name and connect URL. - */ + /** Create database connection with database name and connect URL. 
*/ private DBConnection[] getOtherDBConnections(TestConfig config) { - return config.getOtherDbConnectionNameAndUrls() - .entrySet().stream() - .map(e -> new JDBCConnection(e.getKey(), e.getValue())) - .toArray(DBConnection[]::new); + return config.getOtherDbConnectionNameAndUrls().entrySet().stream() + .map(e -> new JDBCConnection(e.getKey(), e.getValue())) + .toArray(DBConnection[]::new); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java index 3af4db89de..330268c0e4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE; @@ -26,36 +25,45 @@ public void init() throws IOException { @Test public void sanitizeTest() { - String result = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), "csv"); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "'+Amber JOHnny,Duke Willmington+%n" - + "'-Hattie,Bond-%n" - + "'=Nanette,Bates=%n" - + "'@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeQuery( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), + "csv"); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "'+Amber JOHnny,Duke Willmington+%n" + + "'-Hattie,Bond-%n" + + "'=Nanette,Bates=%n" + + "'@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @Test public void escapeSanitizeTest() { - String result = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), - "csv&sanitize=false"); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "+Amber JOHnny,Duke Willmington+%n" - + "-Hattie,Bond-%n" - + "=Nanette,Bates=%n" - + "@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeQuery( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), + "csv&sanitize=false"); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "+Amber JOHnny,Duke Willmington+%n" + + "-Hattie,Bond-%n" + + "=Nanette,Bates=%n" + + "@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @Test public void contentHeaderTest() throws IOException { - String query = String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE); + String query = + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE); String requestBody = makeRequest(query); Request sqlRequest = new Request("POST", "/_plugins/_sql?format=csv"); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java index e935b269f1..432daef82f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java @@ -54,9 +54,10 @@ public void resetTimeZone() { private String name; private Boolean expectedResult; - public DateTimeComparisonIT(@Name("functionCall") String functionCall, - @Name("name") String name, - @Name("expectedResult") Boolean expectedResult) { + public DateTimeComparisonIT( + 
@Name("functionCall") String functionCall, + @Name("name") String name, + @Name("expectedResult") Boolean expectedResult) { this.functionCall = functionCall; this.name = name; this.expectedResult = expectedResult; @@ -64,542 +65,698 @@ public DateTimeComparisonIT(@Name("functionCall") String functionCall, @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), - $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), - $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), - $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), - $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), - $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), - $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), - $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), - $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), - $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), - $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), - $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), - $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), - $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), - $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), + $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), + $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), + $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), + $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), + $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), + $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), + $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), + $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), + $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), + $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), + $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), + $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), + $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), + $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimes() { - return Arrays.asList($$( - $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), - $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), - $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), - $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), - $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), - $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), - $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), - $("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), - $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), - $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), - $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), - $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), - $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), - $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), - $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), + $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), + $("TIME('09:16:37') != TIME('12:15:22')", 
"neq1", true), + $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), + $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), + $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), + $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), + $("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), + $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), + $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), + $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), + $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), + $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), + $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), + $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoDateTimes() { - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), - $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), - $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), - $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), - $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), - $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), - $("DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), + $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), + $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), + $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), + $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), + $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), + 
$("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), + $( + "DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimestamps() { - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), - $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), - $("TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", "neq3", false), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), - $("TIMESTAMP('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", "gte3", false), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), + $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", + "neq3", + false), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), + $("TIMESTAMP('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), + $( + "TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", + "gte3", + false), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), + $( + 
"TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), - $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), + $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + 
"DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", 
true), - $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') 
!= TIME('10:20:30')", "ts_t_f", false), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", 
true), + $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - 
$("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') < 
DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 20:50:42') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') < 
TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", 
"dt_d_f", false), - $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), - $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), + $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), - $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), + $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( 
- $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), - $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", 
"d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), + $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), - $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), - $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), + $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), + $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') <= 
TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= 
DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), 
+ $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), - $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), - $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), + $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), + $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false) - 
)); + return Arrays.asList( + $$( + $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false))); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java index fc05e502c5..d6f2d2c7f4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE_FORMATS; @@ -34,57 +33,72 @@ public void init() throws Exception { @Test public void testReadingDateFormats() throws IOException { - String query = String.format("SELECT weekyear_week_day, hour_minute_second_millis," + - " strict_ordinal_date_time FROM %s LIMIT 1", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT weekyear_week_day, hour_minute_second_millis," + + " strict_ordinal_date_time FROM %s LIMIT 1", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("weekyear_week_day", null, "date"), schema("hour_minute_second_millis", null, "time"), schema("strict_ordinal_date_time", null, "timestamp")); - verifyDataRows(result, - rows("1984-04-12", - "09:07:42", - "1984-04-12 09:07:42.000123456" - )); + verifyDataRows(result, rows("1984-04-12", "09:07:42", "1984-04-12 09:07:42.000123456")); } @Test public void testDateFormatsWithOr() throws IOException { - String query = String.format("SELECT yyyy-MM-dd_OR_epoch_millis FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format("SELECT yyyy-MM-dd_OR_epoch_millis FROM %s", TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifyDataRows(result, - rows("1984-04-12 00:00:00"), - rows("1984-04-12 09:07:42.000123456")); + verifyDataRows(result, rows("1984-04-12 00:00:00"), rows("1984-04-12 09:07:42.000123456")); } @Test @SneakyThrows public void testCustomFormats() { - String query = String.format("SELECT custom_time, custom_timestamp, custom_date_or_date," - + "custom_date_or_custom_time, custom_time_parser_check FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT custom_time, custom_timestamp, custom_date_or_date," + + "custom_date_or_custom_time, custom_time_parser_check FROM %s", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("custom_time", null, "time"), schema("custom_timestamp", null, "timestamp"), schema("custom_date_or_date", null, "date"), schema("custom_date_or_custom_time", null, "timestamp"), 
schema("custom_time_parser_check", null, "time")); - verifyDataRows(result, - rows("09:07:42", "1984-04-12 09:07:42", "1984-04-12", "1961-04-12 00:00:00", "23:44:36.321"), - rows("21:07:42", "1984-04-12 22:07:42", "1984-04-12", "1970-01-01 09:07:00", "09:01:16.542")); + verifyDataRows( + result, + rows( + "09:07:42", "1984-04-12 09:07:42", "1984-04-12", "1961-04-12 00:00:00", "23:44:36.321"), + rows( + "21:07:42", + "1984-04-12 22:07:42", + "1984-04-12", + "1970-01-01 09:07:00", + "09:01:16.542")); } @Test @SneakyThrows public void testCustomFormats2() { - String query = String.format("SELECT custom_no_delimiter_date, custom_no_delimiter_time," - + "custom_no_delimiter_ts FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT custom_no_delimiter_date, custom_no_delimiter_time," + + "custom_no_delimiter_ts FROM %s", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("custom_no_delimiter_date", null, "date"), schema("custom_no_delimiter_time", null, "time"), schema("custom_no_delimiter_ts", null, "timestamp")); - verifyDataRows(result, + verifyDataRows( + result, rows("1984-10-20", "10:20:30", "1984-10-20 15:35:48"), rows("1961-04-12", "09:07:00", "1961-04-12 09:07:00")); } @@ -92,16 +106,21 @@ public void testCustomFormats2() { @Test @SneakyThrows public void testIncompleteFormats() { - String query = String.format("SELECT incomplete_1, incomplete_2, incorrect," - + "incomplete_custom_time, incomplete_custom_date FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT incomplete_1, incomplete_2, incorrect," + + "incomplete_custom_time, incomplete_custom_date FROM %s", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("incomplete_1", null, "timestamp"), schema("incomplete_2", null, "date"), schema("incorrect", null, "timestamp"), schema("incomplete_custom_time", null, "time"), schema("incomplete_custom_date", null, "date")); - verifyDataRows(result, + verifyDataRows( + result, rows("1984-01-01 00:00:00", null, null, "10:00:00", "1999-01-01"), rows("2012-01-01 00:00:00", null, null, "20:00:00", "3021-01-01")); } @@ -109,13 +128,13 @@ public void testIncompleteFormats() { @Test @SneakyThrows public void testNumericFormats() { - String query = String.format("SELECT epoch_sec, epoch_milli" - + " FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format("SELECT epoch_sec, epoch_milli" + " FROM %s", TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, - schema("epoch_sec", null, "timestamp"), - schema("epoch_milli", null, "timestamp")); - verifyDataRows(result, + verifySchema( + result, schema("epoch_sec", null, "timestamp"), schema("epoch_milli", null, "timestamp")); + verifyDataRows( + result, rows("1970-01-01 00:00:42", "1970-01-01 00:00:00.042"), rows("1970-01-02 03:55:00", "1970-01-01 00:01:40.5")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java index ab5aa46853..33eb8b693f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -60,12 +59,14 @@ public void 
resetTimeZone() { } @Test - public void testDateInGroupBy() throws IOException{ + public void testDateInGroupBy() throws IOException { JSONObject result = - executeQuery(String.format("SELECT DATE(birthdate) FROM %s GROUP BY DATE(birthdate)",TEST_INDEX_BANK) ); - verifySchema(result, - schema("DATE(birthdate)", null, "date")); - verifyDataRows(result, + executeQuery( + String.format( + "SELECT DATE(birthdate) FROM %s GROUP BY DATE(birthdate)", TEST_INDEX_BANK)); + verifySchema(result, schema("DATE(birthdate)", null, "date")); + verifyDataRows( + result, rows("2017-10-23"), rows("2017-11-20"), rows("2018-06-23"), @@ -78,9 +79,11 @@ public void testDateInGroupBy() throws IOException{ @Test public void testDateWithHavingClauseOnly() throws IOException { JSONObject result = - executeQuery(String.format("SELECT (TO_DAYS(DATE('2050-01-01')) - 693961) FROM %s HAVING (COUNT(1) > 0)",TEST_INDEX_BANK) ); - verifySchema(result, - schema("(TO_DAYS(DATE('2050-01-01')) - 693961)", null, "long")); + executeQuery( + String.format( + "SELECT (TO_DAYS(DATE('2050-01-01')) - 693961) FROM %s HAVING (COUNT(1) > 0)", + TEST_INDEX_BANK)); + verifySchema(result, schema("(TO_DAYS(DATE('2050-01-01')) - 693961)", null, "long")); verifyDataRows(result, rows(54787)); } @@ -107,83 +110,98 @@ public void testAddDateWithDays() throws IOException { public void testAddDateWithInterval() throws IOException { JSONObject result = executeQuery("select adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select adddate(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - schema("adddate(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 00:00:00")); result = executeQuery("select adddate(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("adddate(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-16 01:00:00")); result = executeQuery("select adddate(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("adddate(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select adddate(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("adddate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - 
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testDateAdd() throws IOException { JSONObject result = executeQuery("select date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select date_add(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - schema("date_add(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 00:00:00")); result = executeQuery("select date_add(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("date_add(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-16 01:00:00")); result = executeQuery("select date_add(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("date_add(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select date_add(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("date_add(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - - result = executeQuery(String.format("SELECT DATE_ADD(birthdate, INTERVAL 1 YEAR) FROM %s", - TEST_INDEX_BANK)); - - verifySchema(result, - schema("DATE_ADD(birthdate, INTERVAL 1 YEAR)", null, "datetime")); - verifyDataRows(result, + verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + + result = + executeQuery( + String.format("SELECT DATE_ADD(birthdate, INTERVAL 1 YEAR) FROM %s", TEST_INDEX_BANK)); + + verifySchema(result, schema("DATE_ADD(birthdate, INTERVAL 1 YEAR)", null, "datetime")); + verifyDataRows( + result, rows("2018-10-23 00:00:00"), rows("2018-11-20 00:00:00"), rows("2019-06-23 00:00:00"), @@ -197,38 +215,45 @@ public void testDateAdd() throws IOException { public void 
testDateSub() throws IOException { JSONObject result = executeQuery("select date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select date_sub(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - schema("date_sub(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 00:00:00")); result = executeQuery("select date_sub(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("date_sub(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 23:00:00")); result = executeQuery("select date_sub(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("date_sub(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select date_sub(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("date_sub(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test @@ -282,30 +307,34 @@ public void testDayOfMonthAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(22)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofmonth(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT day_of_month(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT 
dayofmonth(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT day_of_month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT dayofmonth(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT day_of_month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofmonth(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT day_of_month(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } + @Test public void testDayOfWeek() throws IOException { JSONObject result = executeQuery("select dayofweek(date('2020-09-16'))"); @@ -335,28 +364,31 @@ public void testDayOfWeekAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(3)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofweek(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT day_of_week(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT dayofweek(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT day_of_week(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT dayofweek(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT day_of_week(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofweek(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + 
executeQuery(String.format("SELECT day_of_week(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @@ -397,30 +429,34 @@ public void testDayOfYearAlternateSyntaxesReturnTheSameResults() throws IOExcept verifyDataRows(result1, rows(326)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofyear(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT day_of_year(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT dayofyear(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT day_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT dayofyear(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT day_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofyear(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT day_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } + @Test public void testFromDays() throws IOException { JSONObject result = executeQuery("select from_days(738049)"); @@ -450,13 +486,11 @@ public void testHour() throws IOException { @Test public void testHourOfDayWithUnderscores() throws IOException { JSONObject result = executeQuery("select hour_of_day(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema( - "hour_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema(result, schema("hour_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(17)); result = executeQuery("select hour_of_day(datetime('2020-09-16 17:30:00'))"); - verifySchema(result, schema( - "hour_of_day(datetime('2020-09-16 17:30:00'))", null, "integer")); + verifySchema(result, schema("hour_of_day(datetime('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(17)); result = executeQuery("select hour_of_day(time('17:30:00'))"); @@ -474,41 +508,45 @@ public void testHourOfDayWithUnderscores() throws IOException { @Test public void testExtractWithDatetime() throws IOException { - 
JSONObject datetimeResult = executeQuery( - String.format( - "SELECT extract(DAY_SECOND FROM datetime(cast(datetime0 AS STRING))) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject datetimeResult = + executeQuery( + String.format( + "SELECT extract(DAY_SECOND FROM datetime(cast(datetime0 AS STRING))) FROM %s LIMIT" + + " 1", + TEST_INDEX_CALCS)); verifyDataRows(datetimeResult, rows(9101735)); } @Test public void testExtractWithTime() throws IOException { - JSONObject timeResult = executeQuery( - String.format( - "SELECT extract(HOUR_SECOND FROM time0) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject timeResult = + executeQuery( + String.format( + "SELECT extract(HOUR_SECOND FROM time0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); verifyDataRows(timeResult, rows(210732)); - } @Test public void testExtractWithDate() throws IOException { - JSONObject dateResult = executeQuery( - String.format( - "SELECT extract(YEAR_MONTH FROM date0) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject dateResult = + executeQuery( + String.format( + "SELECT extract(YEAR_MONTH FROM date0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); verifyDataRows(dateResult, rows(200404)); } @Test public void testExtractWithDifferentTypesReturnSameResult() throws IOException { - JSONObject dateResult = executeQuery( - String.format("SELECT extract(YEAR_MONTH FROM datetime0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); + JSONObject dateResult = + executeQuery( + String.format( + "SELECT extract(YEAR_MONTH FROM datetime0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); - JSONObject datetimeResult = executeQuery( - String.format( - "SELECT extract(YEAR_MONTH FROM date(datetime0)) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject datetimeResult = + executeQuery( + String.format( + "SELECT extract(YEAR_MONTH FROM date(datetime0)) FROM %s LIMIT 1", + TEST_INDEX_CALCS)); dateResult.getJSONArray("datarows").similar(datetimeResult.getJSONArray("datarows")); } @@ -520,63 +558,55 @@ public void testHourFunctionAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(11)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT hour_of_day(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT hour_of_day(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT hour_of_day(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery(String.format("SELECT hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT hour_of_day(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT hour(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT hour_of_day(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT hour(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + 
executeQuery(String.format("SELECT hour_of_day(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @Test public void testLastDay() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT last_day(date0) FROM %s LIMIT 3", - TEST_INDEX_CALCS)); - verifyDataRows(result, - rows("2004-04-30"), - rows("1972-07-31"), - rows("1975-11-30")); - - result = executeQuery( - String.format("SELECT last_day(date0) FROM %s LIMIT 3", - TEST_INDEX_CALCS)); - verifyDataRows(result, - rows("2004-04-30"), - rows("1972-07-31"), - rows("1975-11-30")); - - result = executeQuery( - String.format("SELECT last_day(date0) FROM %s LIMIT 3", - TEST_INDEX_CALCS)); - verifyDataRows(result, - rows("2004-04-30"), - rows("1972-07-31"), - rows("1975-11-30")); + JSONObject result = + executeQuery(String.format("SELECT last_day(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + verifyDataRows(result, rows("2004-04-30"), rows("1972-07-31"), rows("1975-11-30")); + + result = + executeQuery(String.format("SELECT last_day(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + verifyDataRows(result, rows("2004-04-30"), rows("1972-07-31"), rows("1975-11-30")); + + result = + executeQuery(String.format("SELECT last_day(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + verifyDataRows(result, rows("2004-04-30"), rows("1972-07-31"), rows("1975-11-30")); } @Test public void testMicrosecond() throws IOException { JSONObject result = executeQuery("select microsecond(timestamp('2020-09-16 17:30:00.123456'))"); - verifySchema(result, - schema("microsecond(timestamp('2020-09-16 17:30:00.123456'))", null, "integer")); + verifySchema( + result, schema("microsecond(timestamp('2020-09-16 17:30:00.123456'))", null, "integer")); verifyDataRows(result, rows(123456)); // Explicit timestamp value with less than 6 microsecond digits result = executeQuery("select microsecond(timestamp('2020-09-16 17:30:00.1234'))"); - verifySchema(result, - schema("microsecond(timestamp('2020-09-16 17:30:00.1234'))", null, "integer")); + verifySchema( + result, schema("microsecond(timestamp('2020-09-16 17:30:00.1234'))", null, "integer")); verifyDataRows(result, rows(123400)); result = executeQuery("select microsecond(time('17:30:00.000010'))"); @@ -626,11 +656,11 @@ public void testMinute() throws IOException { verifyDataRows(result, rows(30)); } - @Test public void testMinuteOfDay() throws IOException { JSONObject result = executeQuery("select minute_of_day(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema("minute_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema( + result, schema("minute_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(1050)); result = executeQuery("select minute_of_day(datetime('2020-09-16 17:30:00'))"); @@ -653,8 +683,8 @@ public void testMinuteOfDay() throws IOException { @Test public void testMinuteOfHour() throws IOException { JSONObject result = executeQuery("select minute_of_hour(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema( - "minute_of_hour(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema( + result, schema("minute_of_hour(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(30)); result = executeQuery("select minute_of_hour(time('17:30:00'))"); @@ -677,22 +707,29 @@ public void testMinuteFunctionAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(30)); 
result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT minute_of_hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT minute_of_hour(datetime(CAST(time0 AS STRING))) FROM %s", + TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT minute_of_hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT minute_of_hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT minute(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT minute_of_hour(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT minute(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT minute_of_hour(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @@ -718,7 +755,8 @@ public void testMonthOfYearTypes() throws IOException { verifyDataRows(result, rows(9)); result = executeQuery("select month_of_year(timestamp('2020-09-16 00:00:00'))"); - verifySchema(result, schema("month_of_year(timestamp('2020-09-16 00:00:00'))", null, "integer")); + verifySchema( + result, schema("month_of_year(timestamp('2020-09-16 00:00:00'))", null, "integer")); verifyDataRows(result, rows(9)); result = executeQuery("select month_of_year('2020-09-16')"); @@ -733,28 +771,31 @@ public void testMonthAlternateSyntaxesReturnTheSameResults() throws IOException verifyDataRows(result1, rows(11)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT month(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT month_of_year(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT month_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = 
executeQuery(String.format( - "SELECT month_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT month_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT month(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT month_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @@ -782,12 +823,9 @@ public void testQuarter() throws IOException { @Test public void testSecToTime() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT sec_to_time(balance) FROM %s LIMIT 3", TEST_INDEX_BANK)); - verifyDataRows(result, - rows("10:53:45"), - rows("01:34:46"), - rows("09:07:18")); + JSONObject result = + executeQuery(String.format("SELECT sec_to_time(balance) FROM %s LIMIT 3", TEST_INDEX_BANK)); + verifyDataRows(result, rows("10:53:45"), rows("01:34:46"), rows("09:07:18")); } @Test @@ -811,7 +849,8 @@ public void testSecond() throws IOException { public void testSecondOfMinute() throws IOException { JSONObject result = executeQuery("select second_of_minute(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema("second_of_minute(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema( + result, schema("second_of_minute(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(0)); result = executeQuery("select second_of_minute(time('17:30:00'))"); @@ -834,70 +873,68 @@ public void testSecondFunctionAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(34)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT second(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT second_of_minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT second(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT second_of_minute(datetime(CAST(time0 AS STRING))) FROM %s", + TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT second(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT second_of_minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT second(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT second_of_minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT second(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT second_of_minute(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT 
second(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT second_of_minute(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @Test public void testStrToDate() throws IOException { - //Ideal case - JSONObject result = executeQuery( - String.format("SELECT str_to_date(CAST(birthdate AS STRING)," - + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", - TEST_INDEX_BANK)); - verifyDataRows(result, - rows("2017-10-23 00:00:00"), - rows("2017-11-20 00:00:00") - ); - - //Bad string format case - result = executeQuery( - String.format("SELECT str_to_date(CAST(birthdate AS STRING)," - + " '%%Y %%s') FROM %s LIMIT 2", - TEST_INDEX_BANK)); - verifyDataRows(result, - rows((Object) null), - rows((Object) null) - ); - - //bad date format case - result = executeQuery( - String.format("SELECT str_to_date(firstname," - + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", - TEST_INDEX_BANK)); - verifyDataRows(result, - rows((Object) null), - rows((Object) null) - ); + // Ideal case + JSONObject result = + executeQuery( + String.format( + "SELECT str_to_date(CAST(birthdate AS STRING)," + + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", + TEST_INDEX_BANK)); + verifyDataRows(result, rows("2017-10-23 00:00:00"), rows("2017-11-20 00:00:00")); + + // Bad string format case + result = + executeQuery( + String.format( + "SELECT str_to_date(CAST(birthdate AS STRING)," + " '%%Y %%s') FROM %s LIMIT 2", + TEST_INDEX_BANK)); + verifyDataRows(result, rows((Object) null), rows((Object) null)); + + // bad date format case + result = + executeQuery( + String.format( + "SELECT str_to_date(firstname," + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", + TEST_INDEX_BANK)); + verifyDataRows(result, rows((Object) null), rows((Object) null)); } @Test public void testSubDateWithDays() throws IOException { - var result = - executeQuery("select subdate(date('2020-09-16'), 1)"); - verifySchema(result, - schema("subdate(date('2020-09-16'), 1)", null, "date")); + var result = executeQuery("select subdate(date('2020-09-16'), 1)"); + verifySchema(result, schema("subdate(date('2020-09-16'), 1)", null, "date")); verifyDataRows(result, rows("2020-09-15")); - result = - executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), 1)"); - verifySchema(result, - schema("subdate(timestamp('2020-09-16 17:30:00'), 1)", null, "datetime")); + result = executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), 1)"); + verifySchema(result, schema("subdate(timestamp('2020-09-16 17:30:00'), 1)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(DATETIME('2020-09-16 07:40:00'), 1)"); @@ -913,60 +950,68 @@ public void testSubDateWithDays() throws IOException { public void testSubDateWithInterval() throws IOException { JSONObject result = executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - 
schema("subdate(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 00:00:00")); result = executeQuery("select subdate(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("subdate(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 23:00:00")); result = executeQuery("select subdate(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("subdate(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select subdate(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("subdate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test - public void testTimstampadd() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT timestampadd(WEEK, 2, time0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + public void testTimstampadd() throws IOException { + JSONObject result = + executeQuery( + String.format("SELECT timestampadd(WEEK, 2, time0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); - verifyDataRows(result, + verifyDataRows( + result, rows("1900-01-13 21:07:32"), rows("1900-01-15 13:48:48"), rows("1900-01-15 18:21:08")); } @Test - public void testTimstampdiff() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT timestampdiff(DAY, time0, datetime0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + public void testTimstampdiff() throws IOException { + JSONObject result = + executeQuery( + String.format( + "SELECT timestampdiff(DAY, time0, datetime0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); - verifyDataRows(result, - rows(38176), - rows(38191), - rows(38198)); + verifyDataRows(result, rows(38176), rows(38191), rows(38198)); } @Test @@ -993,16 +1038,20 @@ public void testToDays() throws IOException { @Test public void testToSeconds() throws IOException { - JSONObject result = executeQuery( - String.format("select to_seconds(date0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + JSONObject result = + executeQuery(String.format("select to_seconds(date0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(63249206400L), rows(62246275200L)); - result = executeQuery( - String.format("SELECT to_seconds(datetime(cast(datetime0 AS string))) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + result = + executeQuery( + String.format( + "SELECT to_seconds(datetime(cast(datetime0 AS string))) FROM %s LIMIT 2", + 
TEST_INDEX_CALCS)); verifyDataRows(result, rows(63256587455L), rows(63258064234L)); - result = executeQuery(String.format( - "select to_seconds(datetime0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + result = + executeQuery( + String.format("select to_seconds(datetime0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(63256587455L), rows(63258064234L)); } @@ -1017,11 +1066,14 @@ public void testYear() throws IOException { verifyDataRows(result, rows(2020)); } - private void week(String date, int mode, int expectedResult, String functionName) throws IOException { - JSONObject result = executeQuery(StringUtils.format("select %s(date('%s'), %d)", functionName, date, - mode)); - verifySchema(result, - schema(StringUtils.format("%s(date('%s'), %d)", functionName, date, mode), null, "integer")); + private void week(String date, int mode, int expectedResult, String functionName) + throws IOException { + JSONObject result = + executeQuery(StringUtils.format("select %s(date('%s'), %d)", functionName, date, mode)); + verifySchema( + result, + schema( + StringUtils.format("%s(date('%s'), %d)", functionName, date, mode), null, "integer")); verifyDataRows(result, rows(expectedResult)); } @@ -1040,7 +1092,8 @@ public void testWeek() throws IOException { @Test public void testWeekday() throws IOException { - JSONObject result = executeQuery(String.format("SELECT weekday(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + JSONObject result = + executeQuery(String.format("SELECT weekday(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); verifyDataRows(result, rows(3), rows(1), rows(2)); } @@ -1071,12 +1124,9 @@ public void testWeekOfYear() throws IOException { } private void compareWeekResults(String arg, String table) throws IOException { - JSONObject result1 = executeQuery(String.format( - "SELECT week(%s) FROM %s", arg, table)); - JSONObject result2 = executeQuery(String.format( - "SELECT week_of_year(%s) FROM %s", arg, table)); - JSONObject result3 = executeQuery(String.format( - "SELECT weekofyear(%s) FROM %s", arg, table)); + JSONObject result1 = executeQuery(String.format("SELECT week(%s) FROM %s", arg, table)); + JSONObject result2 = executeQuery(String.format("SELECT week_of_year(%s) FROM %s", arg, table)); + JSONObject result3 = executeQuery(String.format("SELECT weekofyear(%s) FROM %s", arg, table)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1.getJSONArray("datarows").similar(result3.getJSONArray("datarows")); @@ -1099,13 +1149,16 @@ public void testWeekAlternateSyntaxesReturnTheSameResults() throws IOException { @Test public void testYearweek() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT yearweek(time0), yearweek(time0, 4) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + JSONObject result = + executeQuery( + String.format( + "SELECT yearweek(time0), yearweek(time0, 4) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(189952, 189952), rows(189953, 190001)); } - void verifyDateFormat(String date, String type, String format, String formatted) throws IOException { + void verifyDateFormat(String date, String type, String format, String formatted) + throws IOException { String query = String.format("date_format(%s('%s'), '%s')", type, date, format); JSONObject result = executeQuery("select " + query); verifySchema(result, schema(query, null, "keyword")); @@ -1120,10 +1173,11 @@ void verifyDateFormat(String date, String type, String format, String formatted) @Test public void testDateFormat() throws IOException { 
String timestamp = "1998-01-31 13:14:15.012345"; - String timestampFormat = "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " - + "%m %p %r %S %s %T %% %P"; - String timestampFormatted = "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " - + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; + String timestampFormat = + "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " + "%m %p %r %S %s %T %% %P"; + String timestampFormatted = + "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " + + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; verifyDateFormat(timestamp, "timestamp", timestampFormat, timestampFormatted); String date = "1998-01-31"; @@ -1134,9 +1188,10 @@ public void testDateFormat() throws IOException { @Test public void testMakeTime() throws IOException { - var result = executeQuery( - "select MAKETIME(20, 30, 40) as f1, MAKETIME(20.2, 49.5, 42.100502) as f2"); - verifySchema(result, + var result = + executeQuery("select MAKETIME(20, 30, 40) as f1, MAKETIME(20.2, 49.5, 42.100502) as f2"); + verifySchema( + result, schema("MAKETIME(20, 30, 40)", "f1", "time"), schema("MAKETIME(20.2, 49.5, 42.100502)", "f2", "time")); verifyDataRows(result, rows("20:30:40", "20:50:42.100502")); @@ -1144,9 +1199,9 @@ public void testMakeTime() throws IOException { @Test public void testMakeDate() throws IOException { - var result = executeQuery( - "select MAKEDATE(1945, 5.9) as f1, MAKEDATE(1984, 1984) as f2"); - verifySchema(result, + var result = executeQuery("select MAKEDATE(1945, 5.9) as f1, MAKEDATE(1984, 1984) as f2"); + verifySchema( + result, schema("MAKEDATE(1945, 5.9)", "f1", "date"), schema("MAKEDATE(1984, 1984)", "f2", "date")); verifyDataRows(result, rows("1945-01-06", "1989-06-06")); @@ -1154,30 +1209,35 @@ public void testMakeDate() throws IOException { @Test public void testFromUnixTime() throws IOException { - var result = executeQuery( - "select FROM_UNIXTIME(200300400) f1, FROM_UNIXTIME(12224.12) f2, " - + "FROM_UNIXTIME(1662601316, '%T') f3"); - verifySchema(result, - schema("FROM_UNIXTIME(200300400)", "f1", "datetime"), + var result = + executeQuery( + "select FROM_UNIXTIME(200300400) f1, FROM_UNIXTIME(12224.12) f2, " + + "FROM_UNIXTIME(1662601316, '%T') f3"); + verifySchema( + result, + schema("FROM_UNIXTIME(200300400)", "f1", "datetime"), schema("FROM_UNIXTIME(12224.12)", "f2", "datetime"), schema("FROM_UNIXTIME(1662601316, '%T')", "f3", "keyword")); - verifySome(result.getJSONArray("datarows"), + verifySome( + result.getJSONArray("datarows"), rows("1976-05-07 07:00:00", "1970-01-01 03:23:44.12", "01:41:56")); } @Test - public void testGetFormatAsArgument() throws IOException{ + public void testGetFormatAsArgument() throws IOException { var result = executeQuery("SELECT DATE_FORMAT('2003-10-03',GET_FORMAT(DATE,'USA'))"); verifyDataRows(result, rows("10.03.2003")); } @Test public void testUnixTimeStamp() throws IOException { - var result = executeQuery( - "select UNIX_TIMESTAMP(MAKEDATE(1984, 1984)) f1, " - + "UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')) f2, " - + "UNIX_TIMESTAMP(20771122143845) f3"); - verifySchema(result, + var result = + executeQuery( + "select UNIX_TIMESTAMP(MAKEDATE(1984, 1984)) f1, " + + "UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')) f2, " + + "UNIX_TIMESTAMP(20771122143845) f3"); + verifySchema( + result, schema("UNIX_TIMESTAMP(MAKEDATE(1984, 1984))", "f1", "double"), schema("UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00'))", "f2", "double"), schema("UNIX_TIMESTAMP(20771122143845)", "f3", "double")); @@ -1186,9 +1246,9 @@ public void testUnixTimeStamp() 
throws IOException { @Test public void testPeriodAdd() throws IOException { - var result = executeQuery( - "select PERIOD_ADD(200801, 2) as f1, PERIOD_ADD(200801, -12) as f2"); - verifySchema(result, + var result = executeQuery("select PERIOD_ADD(200801, 2) as f1, PERIOD_ADD(200801, -12) as f2"); + verifySchema( + result, schema("PERIOD_ADD(200801, 2)", "f1", "integer"), schema("PERIOD_ADD(200801, -12)", "f2", "integer")); verifyDataRows(result, rows(200803, 200701)); @@ -1196,57 +1256,103 @@ public void testPeriodAdd() throws IOException { @Test public void testPeriodDiff() throws IOException { - var result = executeQuery( - "select PERIOD_DIFF(200802, 200703) as f1, PERIOD_DIFF(200802, 201003) as f2"); - verifySchema(result, + var result = + executeQuery("select PERIOD_DIFF(200802, 200703) as f1, PERIOD_DIFF(200802, 201003) as f2"); + verifySchema( + result, schema("PERIOD_DIFF(200802, 200703)", "f1", "integer"), schema("PERIOD_DIFF(200802, 201003)", "f2", "integer")); verifyDataRows(result, rows(11, -25)); } public void testAddTime() throws IOException { - var result = executeQuery("SELECT" - + " ADDTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' + 0`," - + " ADDTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' + 0`," - + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' + '23:59:59'`," - + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' + '00:05:42'`," - + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS `'15:42:13' + '09:07:00'`"); - verifySchema(result, + var result = + executeQuery( + "SELECT ADDTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' + 0`," + + " ADDTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' + 0`," + + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' + '23:59:59'`," + + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' + '00:05:42'`," + + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS" + + " `'15:42:13' + '09:07:00'`"); + verifySchema( + result, schema("ADDTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' + 0", "datetime"), schema("ADDTIME(TIME('23:59:59'), DATE('2004-01-01'))", "'23:59:59' + 0", "time"), - schema("ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))", "'2004-01-01' + '23:59:59'", "datetime"), + schema( + "ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))", + "'2004-01-01' + '23:59:59'", + "datetime"), schema("ADDTIME(TIME('10:20:30'), TIME('00:05:42'))", "'10:20:30' + '00:05:42'", "time"), - schema("ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", "'15:42:13' + '09:07:00'", "datetime")); - verifyDataRows(result, rows("2008-12-12 00:00:00", "23:59:59", "2004-01-01 23:59:59", "10:26:12", "2000-01-01 00:49:13")); + schema( + "ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", + "'15:42:13' + '09:07:00'", + "datetime")); + verifyDataRows( + result, + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2004-01-01 23:59:59", + "10:26:12", + "2000-01-01 00:49:13")); } @Test public void testSubTime() throws IOException { - var result = executeQuery("SELECT" - + " SUBTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' - 0`," - + " SUBTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' - 0`," - + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' - '23:59:59'`," - + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' - '00:05:42'`," - + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) 
AS `'15:42:13' - '09:07:00'`"); - verifySchema(result, + var result = + executeQuery( + "SELECT SUBTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' - 0`," + + " SUBTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' - 0`," + + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' - '23:59:59'`," + + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' - '00:05:42'`," + + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS" + + " `'15:42:13' - '09:07:00'`"); + verifySchema( + result, schema("SUBTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' - 0", "datetime"), schema("SUBTIME(TIME('23:59:59'), DATE('2004-01-01'))", "'23:59:59' - 0", "time"), - schema("SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))", "'2004-01-01' - '23:59:59'", "datetime"), + schema( + "SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))", + "'2004-01-01' - '23:59:59'", + "datetime"), schema("SUBTIME(TIME('10:20:30'), TIME('00:05:42'))", "'10:20:30' - '00:05:42'", "time"), - schema("SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", "'15:42:13' - '09:07:00'", "datetime")); - verifyDataRows(result, rows("2008-12-12 00:00:00", "23:59:59", "2003-12-31 00:00:01", "10:14:48", "1999-12-31 06:35:13")); + schema( + "SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", + "'15:42:13' - '09:07:00'", + "datetime")); + verifyDataRows( + result, + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2003-12-31 00:00:01", + "10:14:48", + "1999-12-31 06:35:13")); } public void testDateDiff() throws IOException { - var result = executeQuery("SELECT" - + " DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')) AS `'2000-01-02' - '2000-01-01'`," - + " DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00')) AS `'2001-02-01' - '2004-01-01'`," - + " DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30')) AS `'2004-01-01' - '2002-02-01'`," - + " DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) AS `today - today`"); - verifySchema(result, - schema("DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59'))", "'2000-01-02' - '2000-01-01'", "long"), - schema("DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00'))", "'2001-02-01' - '2004-01-01'", "long"), - schema("DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))", "'2004-01-01' - '2002-02-01'", "long"), + var result = + executeQuery( + "SELECT DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')) AS" + + " `'2000-01-02' - '2000-01-01'`, DATEDIFF(DATE('2001-02-01')," + + " TIMESTAMP('2004-01-01 00:00:00')) AS `'2001-02-01' - '2004-01-01'`," + + " DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30')) AS" + + " `'2004-01-01' - '2002-02-01'`, DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) AS" + + " `today - today`"); + verifySchema( + result, + schema( + "DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59'))", + "'2000-01-02' - '2000-01-01'", + "long"), + schema( + "DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00'))", + "'2001-02-01' - '2004-01-01'", + "long"), + schema( + "DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))", + "'2004-01-01' - '2002-02-01'", + "long"), schema("DATEDIFF(TIME('23:59:59'), TIME('00:00:00'))", "today - today", "long")); verifyDataRows(result, rows(1, -1064, 699, 0)); } @@ -1258,7 +1364,8 @@ public void testTimeDiff() throws IOException { verifyDataRows(result, 
rows("10:59:59")); } - void verifyTimeFormat(String time, String type, String format, String formatted) throws IOException { + void verifyTimeFormat(String time, String type, String format, String formatted) + throws IOException { String query = String.format("time_format(%s('%s'), '%s')", type, time, format); JSONObject result = executeQuery("select " + query); verifySchema(result, schema(query, null, "keyword")); @@ -1361,13 +1468,13 @@ public void testBracketedEquivalent() throws IOException { @Test public void testBracketFails() { - assertThrows(ResponseException.class, ()->executeQuery("select {time '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {t '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {date '17:30:00'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {d '17:30:00'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {timestamp '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {ts '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {timestamp '17:30:00'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {ts '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {time '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {t '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {date '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {d '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {timestamp '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {ts '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {timestamp '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {ts '17:30:00'}")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java index 94a5b4fb16..8ffa1df8f3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java @@ -16,7 +16,6 @@ public class DateTimeImplementationIT extends SQLIntegTestCase { - @Override public void init() throws Exception { super.init(); @@ -25,136 +24,118 @@ public void init() throws Exception { @Test public void inRangeZeroToStringTZ() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')"); - verifySchema(result, + var result = + executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')"); + verifySchema( + result, schema("DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')", null, "datetime")); verifyDataRows(result, rows("2008-12-24 21:30:00")); } @Test public void inRangeZeroToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-12-25 05:30:00+00:00', '+01:00')"); - verifySchema(result, - schema("DATETIME('2008-12-25 05:30:00+00:00', '+01:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00+00:00', '+01:00')"); + verifySchema( + result, schema("DATETIME('2008-12-25 05:30:00+00:00', '+01:00')", null, "datetime")); verifyDataRows(result, rows("2008-12-25 06:30:00")); } @Test public 
void inRangeNegativeToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-12-25 05:30:00-05:00', '+05:00')"); - verifySchema(result, - schema("DATETIME('2008-12-25 05:30:00-05:00', '+05:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00-05:00', '+05:00')"); + verifySchema( + result, schema("DATETIME('2008-12-25 05:30:00-05:00', '+05:00')", null, "datetime")); verifyDataRows(result, rows("2008-12-25 15:30:00")); } @Test public void inRangeTwentyHourOffset() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2004-02-28 23:00:00-10:00', '+10:00')"); - verifySchema(result, - schema("DATETIME('2004-02-28 23:00:00-10:00', '+10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2004-02-28 23:00:00-10:00', '+10:00')"); + verifySchema( + result, schema("DATETIME('2004-02-28 23:00:00-10:00', '+10:00')", null, "datetime")); verifyDataRows(result, rows("2004-02-29 19:00:00")); } @Test public void inRangeYearChange() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00', '-10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00', '-10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-10:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-10:00')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 06:00:00")); } @Test public void inRangeZeroNoToTZ() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00')"); + verifySchema(result, schema("DATETIME('2008-01-01 02:00:00+10:00')", null, "datetime")); verifyDataRows(result, rows("2008-01-01 02:00:00")); } @Test public void inRangeZeroNoTZ() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00')"); + verifySchema(result, schema("DATETIME('2008-01-01 02:00:00')", null, "datetime")); verifyDataRows(result, rows("2008-01-01 02:00:00")); } @Test public void inRangeZeroDayConvert() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+12:00', '-12:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+12:00', '-12:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+12:00', '-12:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+12:00', '-12:00')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 02:00:00")); } @Test public void inRangeJustInRangeNegative() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00', '-13:59')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00', '-13:59')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-13:59')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-13:59')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 02:01:00")); } @Test public void inRangeJustInRangePositive() throws IOException { - var result = 
executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+14:00', '-10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+14:00', '-10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+14:00', '-10:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+14:00', '-10:00')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 02:00:00")); } @Test public void nullField3Under() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00', '-14:01')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00', '-14:01')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-14:01')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-14:01')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullField1Over() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+14:01', '-10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+14:01', '-10:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+14:01', '-10:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+14:01', '-10:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2021-02-30 10:00:00')"); - verifySchema(result, - schema("DATETIME('2021-02-30 10:00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2021-02-30 10:00:00')"); + verifySchema(result, schema("DATETIME('2021-02-30 10:00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2021-04-31 10:00:00')"); - verifySchema(result, - schema("DATETIME('2021-04-31 10:00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2021-04-31 10:00:00')"); + verifySchema(result, schema("DATETIME('2021-04-31 10:00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueMonth() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2021-13-03 10:00:00')"); - verifySchema(result, - schema("DATETIME('2021-13-03 10:00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2021-13-03 10:00:00')"); + verifySchema(result, schema("DATETIME('2021-13-03 10:00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java index 30211366b1..be1471641e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.is; @@ 
-23,15 +22,14 @@ import org.opensearch.sql.legacy.RestIntegTestCase; /** - * Integration test for different type of expressions such as literals, arithmetic, predicate - * and function expression. Since comparison test in {@link SQLCorrectnessIT} is enforced, - * this kind of manual written IT class will be focused on anomaly case test. + * Integration test for different type of expressions such as literals, arithmetic, predicate and + * function expression. Since comparison test in {@link SQLCorrectnessIT} is enforced, this kind of + * manual written IT class will be focused on anomaly case test. */ @Ignore public class ExpressionIT extends RestIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Override protected void init() throws Exception { @@ -44,8 +42,7 @@ public ResponseExceptionAssertion expectResponseException() { /** * Response exception assertion helper to assert property value in OpenSearch ResponseException - * and Response inside. This serves as syntax sugar to improve the readability of test - * code. + * and Response inside. This serves as syntax sugar to improve the readability of test code. */ private static class ResponseExceptionAssertion { private final ExpectedException exceptionRule; @@ -57,9 +54,12 @@ private ResponseExceptionAssertion(ExpectedException exceptionRule) { } ResponseExceptionAssertion hasStatusCode(int expected) { - exceptionRule.expect(featureValueOf("statusCode", is(expected), - (Function) e -> - e.getResponse().getStatusLine().getStatusCode())); + exceptionRule.expect( + featureValueOf( + "statusCode", + is(expected), + (Function) + e -> e.getResponse().getStatusLine().getStatusCode())); return this; } @@ -83,5 +83,4 @@ private static Response executeQuery(String query) throws IOException { return client().performRequest(request); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java index d55972691c..d0f890526b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java @@ -30,90 +30,126 @@ public void single_highlight_test() { String query = "SELECT Tags, highlight('Tags') FROM %s WHERE match(Tags, 'yeast') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("Tags", null, "text"), - schema("highlight('Tags')", null, "nested")); + verifySchema( + response, schema("Tags", null, "text"), schema("highlight('Tags')", null, "nested")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, - rows("alcohol-level yeast home-brew champagne", + verifyDataRows( + response, + rows( + "alcohol-level yeast home-brew champagne", new JSONArray(List.of("alcohol-level yeast home-brew champagne")))); } @Test public void highlight_optional_arguments_test() { - String query = "SELECT highlight('Tags', pre_tags='', post_tags='') " + - "FROM %s WHERE match(Tags, 'yeast') LIMIT 1"; + String query = + "SELECT highlight('Tags', pre_tags='', post_tags='') " + + "FROM %s WHERE match(Tags, 'yeast') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("highlight('Tags', pre_tags='', post_tags='')", - null, "nested")); + verifySchema( + response, + 
schema("highlight('Tags', pre_tags='', post_tags='')", null, "nested")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, + verifyDataRows( + response, rows(new JSONArray(List.of("alcohol-level yeast home-brew champagne")))); } @Test public void highlight_multiple_optional_arguments_test() { - String query = "SELECT highlight(Title), highlight(Body, pre_tags='', post_tags='') FROM %s WHERE multi_match([Title, Body], 'IPA') LIMIT 1"; + String query = + "SELECT highlight(Title), highlight(Body, pre_tags='', post_tags='') FROM %s WHERE multi_match([Title, Body], 'IPA')" + + " LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("highlight(Title)", null, "nested"), - schema("highlight(Body, pre_tags='', " + - "post_tags='')", null, "nested")); + verifySchema( + response, + schema("highlight(Title)", null, "nested"), + schema( + "highlight(Body, pre_tags='', " + + "post_tags='')", + null, + "nested")); assertEquals(1, response.getInt("size")); - verifyDataRows(response, rows(new JSONArray(List.of("What are the differences between an IPA" + - " and its variants?")), - new JSONArray(List.of("
I know what makes an IPA" + - " an IPA, but what are the unique characteristics of it's" + - " common variants?", - "To be specific, the ones I'm interested in are Double IPA " + - "and Black IPA, but general differences" + - " between")))); + verifyDataRows( + response, + rows( + new JSONArray( + List.of("What are the differences between an IPA" + " and its variants?")), + new JSONArray( + List.of( + "
I know what makes an IPA an" + + " IPA, but what are the" + + " unique characteristics of it's common variants?", + "To be specific, the ones I'm interested in are Double IPA and Black IPA, but general differences" + + " between")))); } @Test public void multiple_highlight_test() { - String query = "SELECT highlight(Title), highlight(Tags) FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; + String query = + "SELECT highlight(Title), highlight(Tags) FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops')" + + " LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("highlight(Title)", null, "nested"), + verifySchema( + response, + schema("highlight(Title)", null, "nested"), schema("highlight(Tags)", null, "nested")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, - rows( new JSONArray(List.of("What uses do hops have outside of brewing?")), + verifyDataRows( + response, + rows( + new JSONArray(List.of("What uses do hops have outside of brewing?")), new JSONArray(List.of("hops history")))); } @Test public void wildcard_highlight_test() { - String query = "SELECT highlight('*itle') FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; + String query = + "SELECT highlight('*itle') FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); verifySchema(response, schema("highlight('*itle')", null, "object")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, rows(new JSONObject(ImmutableMap.of( - "Title", new JSONArray(List.of("What uses do hops have outside of brewing?")))))); + verifyDataRows( + response, + rows( + new JSONObject( + ImmutableMap.of( + "Title", + new JSONArray( + List.of("What uses do hops have outside of brewing?")))))); } @Test public void wildcard_multi_field_highlight_test() { - String query = "SELECT highlight('T*') FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; + String query = + "SELECT highlight('T*') FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); verifySchema(response, schema("highlight('T*')", null, "object")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, rows(new JSONObject(ImmutableMap.of( - "Title", new JSONArray(List.of("What uses do hops have outside of brewing?")), - "Tags", new JSONArray(List.of("hops history")))))); + verifyDataRows( + response, + rows( + new JSONObject( + ImmutableMap.of( + "Title", + new JSONArray( + List.of("What uses do hops have outside of brewing?")), + "Tags", new JSONArray(List.of("hops history")))))); } @Test @@ -124,9 +160,15 @@ public void highlight_all_test() { verifySchema(response, schema("highlight('*')", null, "object")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, rows(new JSONObject(ImmutableMap.of( - "Title", new JSONArray(List.of("What uses do hops have outside of brewing?")), - "Tags", new JSONArray(List.of("hops history")))))); + verifyDataRows( + response, + rows( + new JSONObject( + ImmutableMap.of( + "Title", + new JSONArray( + List.of("What uses do hops have outside of brewing?")), + "Tags", new JSONArray(List.of("hops history")))))); } @Test @@ -136,14 +178,23 @@ public void highlight_no_limit_test() { verifySchema(response, schema("highlight(Body)", null, "nested")); assertEquals(2, response.getInt("total")); - 
verifyDataRows(response, rows(new JSONArray(List.of("Boiling affects hops, by boiling" + - " off the aroma and extracting more of the organic acids that provide"))), - - rows(new JSONArray(List.of("
Do hops have (or had in the past) any use outside of brewing beer?", - "when-was-the-first-beer-ever-brewed\">dating first modern beers we have the first record" + - " of cultivating hops", - "predating the first record of use of hops in beer by nearly a century.", - "Could the hops have been cultivated for any other purpose than brewing, " + - "or can we safely assume if they")))); + verifyDataRows( + response, + rows( + new JSONArray( + List.of( + "Boiling affects hops, by boiling" + + " off the aroma and extracting more of the organic acids that provide"))), + rows( + new JSONArray( + List.of( + "
Do hops have (or had in the past) any use outside of brewing" + + " beer?", + "when-was-the-first-beer-ever-brewed\">dating first modern beers we have" + + " the first record of cultivating hops", + "predating the first record of use of hops in beer by nearly a" + + " century.", + "Could the hops have been cultivated for any other purpose than" + + " brewing, or can we safely assume if they")))); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java index 8d3f9e1509..2c1796f0c3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -19,9 +18,7 @@ import org.opensearch.client.Request; import org.opensearch.sql.legacy.SQLIntegTestCase; -/** - * Integration tests for identifiers including index and field name symbol. - */ +/** Integration tests for identifiers including index and field name symbol. */ public class IdentifierIT extends SQLIntegTestCase { @Test @@ -46,12 +43,13 @@ public void testQuotedIndexNames() throws IOException { @Test public void testSpecialFieldName() throws IOException { - new Index("test") - .addDoc("{\"@timestamp\": 10, \"dimensions:major_version\": 30}"); - final JSONObject result = new JSONObject(executeQuery("SELECT @timestamp, " - + "`dimensions:major_version` FROM test", "jdbc")); + new Index("test").addDoc("{\"@timestamp\": 10, \"dimensions:major_version\": 30}"); + final JSONObject result = + new JSONObject( + executeQuery("SELECT @timestamp, " + "`dimensions:major_version` FROM test", "jdbc")); - verifySchema(result, + verifySchema( + result, schema("@timestamp", null, "long"), schema("dimensions:major_version", null, "long")); verifyDataRows(result, rows(10, 30)); @@ -66,12 +64,11 @@ public void testMultipleQueriesWithSpecialIndexNames() throws IOException { @Test public void testDoubleUnderscoreIdentifierTest() throws IOException { - new Index("test.twounderscores") - .addDoc("{\"__age\": 30}"); - final JSONObject result = new JSONObject(executeQuery("SELECT __age FROM test.twounderscores", "jdbc")); + new Index("test.twounderscores").addDoc("{\"__age\": 30}"); + final JSONObject result = + new JSONObject(executeQuery("SELECT __age FROM test.twounderscores", "jdbc")); - verifySchema(result, - schema("__age", null, "long")); + verifySchema(result, schema("__age", null, "long")); verifyDataRows(result, rows(30)); } @@ -83,19 +80,20 @@ public void testMetafieldIdentifierTest() throws IOException { new Index(index).addDoc("{\"age\": 30}", id); // Execute using field metadata values - final JSONObject result = new JSONObject(executeQuery( - "SELECT *, _id, _index, _score, _maxscore, _sort " - + "FROM " + index, - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery( + "SELECT *, _id, _index, _score, _maxscore, _sort " + "FROM " + index, "jdbc")); // Verify that the metadata values are returned when requested - verifySchema(result, - schema("age", null, "long"), - schema("_id", null, "keyword"), - schema("_index", null, "keyword"), - schema("_score", null, "float"), - schema("_maxscore", null, "float"), - schema("_sort", null, "long")); + verifySchema( + result, + schema("age", null, "long"), + schema("_id", null, "keyword"), + schema("_index", null, "keyword"), + schema("_score", null, "float"), + 
schema("_maxscore", null, "float"), + schema("_sort", null, "long")); verifyDataRows(result, rows(30, id, index, 1.0, 1.0, -2)); } @@ -113,13 +111,13 @@ public void testMetafieldIdentifierRoutingSelectTest() throws IOException { .addDocWithShardId("{\"age\": 35}", "test5", "test5"); // Execute using field metadata values filtering on the routing shard hash id - final JSONObject result = new JSONObject(executeQuery( - "SELECT age, _id, _index, _routing " - + "FROM " + index, - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery("SELECT age, _id, _index, _routing " + "FROM " + index, "jdbc")); // Verify that the metadata values are returned when requested - verifySchema(result, + verifySchema( + result, schema("age", null, "long"), schema("_id", null, "keyword"), schema("_index", null, "keyword"), @@ -151,14 +149,19 @@ public void testMetafieldIdentifierRoutingFilterTest() throws IOException { .addDocWithShardId("{\"age\": 36}", "test6", "test6"); // Execute using field metadata values filtering on the routing shard hash id - final JSONObject result = new JSONObject(executeQuery( - "SELECT _id, _index, _routing " - + "FROM " + index + " " - + "WHERE _routing = \\\"test4\\\"", - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery( + "SELECT _id, _index, _routing " + + "FROM " + + index + + " " + + "WHERE _routing = \\\"test4\\\"", + "jdbc")); // Verify that the metadata values are returned when requested - verifySchema(result, + verifySchema( + result, schema("_id", null, "keyword"), schema("_index", null, "keyword"), schema("_routing", null, "keyword")); @@ -170,7 +173,6 @@ public void testMetafieldIdentifierRoutingFilterTest() throws IOException { assertEquals("test4", datarows.getJSONArray(0).getString(0)); // note that _routing in the SELECT clause returns the shard, not the routing hash id assertTrue(datarows.getJSONArray(0).getString(2).contains("[" + index + "]")); - } @Test @@ -181,14 +183,21 @@ public void testMetafieldIdentifierWithAliasTest() throws IOException { new Index(index).addDoc("{\"age\": 30}", id); // Execute using field metadata values - final JSONObject result = new JSONObject(executeQuery( - "SELECT _id AS A, _index AS B, _score AS C, _maxscore AS D, _sort AS E " - + "FROM " + index + " " - + "WHERE _id = \\\"" + id + "\\\"", - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery( + "SELECT _id AS A, _index AS B, _score AS C, _maxscore AS D, _sort AS E " + + "FROM " + + index + + " " + + "WHERE _id = \\\"" + + id + + "\\\"", + "jdbc")); // Verify that the metadata values are returned when requested - verifySchema(result, + verifySchema( + result, schema("_id", "A", "keyword"), schema("_index", "B", "keyword"), schema("_score", "C", "float"), @@ -209,9 +218,7 @@ private void queryAndAssertTheDoc(String sql) { verifyDataRows(result, rows(30)); } - /** - * Index abstraction for test code readability. - */ + /** Index abstraction for test code readability. 
*/ private static class Index { private final String indexName; @@ -241,18 +248,20 @@ void addDoc(String doc) { } public Index addDoc(String doc, String id) { - Request indexDoc = new Request("POST", String.format("/%s/_doc/%s?refresh=true", indexName, id)); + Request indexDoc = + new Request("POST", String.format("/%s/_doc/%s?refresh=true", indexName, id)); indexDoc.setJsonEntity(doc); performRequest(client(), indexDoc); return this; } public Index addDocWithShardId(String doc, String id, String routing) { - Request indexDoc = new Request("POST", String.format("/%s/_doc/%s?refresh=true&routing=%s", indexName, id, routing)); + Request indexDoc = + new Request( + "POST", String.format("/%s/_doc/%s?refresh=true&routing=%s", indexName, id, routing)); indexDoc.setJsonEntity(doc); performRequest(client(), indexDoc); return this; } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java index 4b158d73df..f36992b1d0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -23,11 +22,16 @@ protected void init() throws Exception { @Test public void testSimpleDataTypesInSchema() { - JSONObject response = new JSONObject(executeQuery( - "SELECT account_number, address, age, birthdate, city, male, state " - + "FROM " + TEST_INDEX_BANK, "jdbc")); - - verifySchema(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT account_number, address, age, birthdate, city, male, state " + + "FROM " + + TEST_INDEX_BANK, + "jdbc")); + + verifySchema( + response, schema("account_number", "long"), schema("address", "text"), schema("age", "integer"), @@ -39,10 +43,10 @@ public void testSimpleDataTypesInSchema() { @Test public void testAliasInSchema() { - JSONObject response = new JSONObject(executeQuery( - "SELECT account_number AS acc FROM " + TEST_INDEX_BANK, "jdbc")); + JSONObject response = + new JSONObject( + executeQuery("SELECT account_number AS acc FROM " + TEST_INDEX_BANK, "jdbc")); verifySchema(response, schema("account_number", "acc", "long")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java index 1f85b2857f..e9c0fd2c55 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java @@ -24,9 +24,7 @@ import org.opensearch.sql.legacy.SQLIntegTestCase; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * For backward compatibility, check if legacy API endpoints are accessible. - */ +/** For backward compatibility, check if legacy API endpoints are accessible. 
*/ public class LegacyAPICompatibilityIT extends SQLIntegTestCase { @Override @@ -56,8 +54,8 @@ public void explain() throws IOException { @Test public void closeCursor() throws IOException { - String sql = StringUtils.format( - "SELECT firstname FROM %s WHERE balance > 100", TEST_INDEX_ACCOUNT); + String sql = + StringUtils.format("SELECT firstname FROM %s WHERE balance > 100", TEST_INDEX_ACCOUNT); JSONObject result = new JSONObject(executeFetchQuery(sql, 50, "jdbc")); Request request = new Request("POST", LEGACY_CURSOR_CLOSE_ENDPOINT); @@ -76,44 +74,36 @@ public void stats() throws IOException { @Test public void legacySettingsLegacyEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"opendistro.sql.query.slowlog\": \"10\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"opendistro.sql.query.slowlog\": \"10\"" + " }" + "}"; Response response = updateSetting(LEGACY_SQL_SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void legacySettingNewEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"opendistro.query.size_limit\": \"100\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"opendistro.query.size_limit\": \"100\"" + " }" + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void newSettingsLegacyEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"plugins.sql.slowlog\": \"10\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"plugins.sql.slowlog\": \"10\"" + " }" + "}"; Response response = updateSetting(LEGACY_SQL_SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void newSettingNewEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"plugins.query.metrics.rolling_interval\": \"80\"" - + " }" - + "}"; + String requestBody = + "{" + + " \"persistent\": {" + + " \"plugins.query.metrics.rolling_interval\": \"80\"" + + " }" + + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @@ -130,5 +120,4 @@ private RequestOptions.Builder buildJsonOption() { restOptionsBuilder.addHeader("Content-Type", "application/json"); return restOptionsBuilder; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java index 0dbb0404f9..c5ff50898a 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; @@ -23,9 +22,11 @@ protected void init() throws Exception { @Test public void test_like_in_select() throws IOException { - String query = "SELECT KeywordBody, KeywordBody LIKE 'test wildcard%' FROM " + TEST_INDEX_WILDCARD; + String query = + "SELECT KeywordBody, KeywordBody LIKE 'test wildcard%' FROM " + TEST_INDEX_WILDCARD; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard", true), rows("test wildcard in the end of the text%", 
true), rows("%test wildcard in the beginning of the text", false), @@ -40,9 +41,11 @@ public void test_like_in_select() throws IOException { @Test public void test_like_in_select_with_escaped_percent() throws IOException { - String query = "SELECT KeywordBody, KeywordBody LIKE '\\\\%test wildcard%' FROM " + TEST_INDEX_WILDCARD; + String query = + "SELECT KeywordBody, KeywordBody LIKE '\\\\%test wildcard%' FROM " + TEST_INDEX_WILDCARD; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard", false), rows("test wildcard in the end of the text%", false), rows("%test wildcard in the beginning of the text", true), @@ -57,9 +60,11 @@ public void test_like_in_select_with_escaped_percent() throws IOException { @Test public void test_like_in_select_with_escaped_underscore() throws IOException { - String query = "SELECT KeywordBody, KeywordBody LIKE '\\\\_test wildcard%' FROM " + TEST_INDEX_WILDCARD; + String query = + "SELECT KeywordBody, KeywordBody LIKE '\\\\_test wildcard%' FROM " + TEST_INDEX_WILDCARD; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard", false), rows("test wildcard in the end of the text%", false), rows("%test wildcard in the beginning of the text", false), @@ -74,9 +79,13 @@ public void test_like_in_select_with_escaped_underscore() throws IOException { @Test public void test_like_in_where() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE KeywordBody LIKE 'test wildcard%'"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE KeywordBody LIKE 'test wildcard%'"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard"), rows("test wildcard in the end of the text%"), rows("test wildcard in % the middle of the text"), @@ -88,18 +97,22 @@ public void test_like_in_where() throws IOException { @Test public void test_like_in_where_with_escaped_percent() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE KeywordBody LIKE '\\\\%test wildcard%'"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE KeywordBody LIKE '\\\\%test wildcard%'"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("%test wildcard in the beginning of the text")); + verifyDataRows(result, rows("%test wildcard in the beginning of the text")); } @Test public void test_like_in_where_with_escaped_underscore() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE KeywordBody LIKE '\\\\_test wildcard%'"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE KeywordBody LIKE '\\\\_test wildcard%'"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("_test wildcard in the beginning of the text")); + verifyDataRows(result, rows("_test wildcard in the beginning of the text")); } @Test @@ -118,7 +131,8 @@ public void test_like_on_text_keyword_field_with_one_word() throws IOException { @Test public void test_like_on_text_keyword_field_with_greater_than_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE TextKeywordBody LIKE 'test wild*'"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE TextKeywordBody LIKE 'test wild*'"; JSONObject result = executeJdbcRequest(query); assertEquals(7, 
result.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java index 1c959c5460..c81cc8e4f5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java @@ -23,32 +23,30 @@ public void init() throws IOException { @Test public void query_matches_test() throws IOException { - String query = "SELECT phrase FROM " - + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'quick')"; + String query = + "SELECT phrase FROM " + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'quick')"; var result = new JSONObject(executeQuery(query, "jdbc")); verifySchema(result, schema("phrase", "text")); - verifyDataRows(result, - rows("quick fox"), - rows("quick fox here")); + verifyDataRows(result, rows("quick fox"), rows("quick fox here")); } @Test public void additional_parameters_test() throws IOException { - String query = "SELECT phrase FROM " - + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, '2 test', minimum_should_match=1, fuzziness=2)"; + String query = + "SELECT phrase FROM " + + TEST_INDEX_PHRASE + + " WHERE match_bool_prefix(phrase, '2 test', minimum_should_match=1, fuzziness=2)"; var result = new JSONObject(executeQuery(query, "jdbc")); verifySchema(result, schema("phrase", "text")); - verifyDataRows(result, - rows("my test"), - rows("my test 2")); + verifyDataRows(result, rows("my test"), rows("my test 2")); } @Test public void no_matches_test() throws IOException { - String query = "SELECT * FROM " - + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'rice')"; + String query = + "SELECT * FROM " + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'rice')"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(0, result.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java index 9885ddfa33..5bde838e19 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java @@ -29,93 +29,119 @@ public void init() throws IOException { @Test public void match_in_where() throws IOException { - JSONObject result = executeJdbcRequest("SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE match(lastname, 'Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE match(lastname, 'Bates')"); verifySchema(result, schema("firstname", "text")); verifyDataRows(result, rows("Nanette")); } @Test public void match_in_having() throws IOException { - JSONObject result = executeJdbcRequest("SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void missing_field_test() { - String query = StringUtils.format("SELECT * FROM %s WHERE match(invalid, 'Bates')", TEST_INDEX_ACCOUNT); + String query = + StringUtils.format("SELECT * FROM %s WHERE match(invalid, 'Bates')", TEST_INDEX_ACCOUNT); final RuntimeException exception = expectThrows(RuntimeException.class, () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage() - .contains("can't resolve 
Symbol(namespace=FIELD_NAME, name=invalid) in type env")); + assertTrue( + exception + .getMessage() + .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); assertTrue(exception.getMessage().contains("SemanticCheckException")); } @Test public void missing_quoted_field_test() { - String query = StringUtils.format("SELECT * FROM %s WHERE match('invalid', 'Bates')", TEST_INDEX_ACCOUNT); + String query = + StringUtils.format("SELECT * FROM %s WHERE match('invalid', 'Bates')", TEST_INDEX_ACCOUNT); final RuntimeException exception = expectThrows(RuntimeException.class, () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage() - .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); + assertTrue( + exception + .getMessage() + .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); assertTrue(exception.getMessage().contains("SemanticCheckException")); } @Test public void missing_backtick_field_test() { - String query = StringUtils.format("SELECT * FROM %s WHERE match(`invalid`, 'Bates')", TEST_INDEX_ACCOUNT); + String query = + StringUtils.format("SELECT * FROM %s WHERE match(`invalid`, 'Bates')", TEST_INDEX_ACCOUNT); final RuntimeException exception = expectThrows(RuntimeException.class, () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage() - .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); + assertTrue( + exception + .getMessage() + .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); assertTrue(exception.getMessage().contains("SemanticCheckException")); } @Test public void matchquery_in_where() throws IOException { - JSONObject result = executeJdbcRequest("SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE matchquery(lastname, 'Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE matchquery(lastname, 'Bates')"); verifySchema(result, schema("firstname", "text")); verifyDataRows(result, rows("Nanette")); } @Test public void matchquery_in_having() throws IOException { - JSONObject result = executeJdbcRequest("SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING matchquery(firstname, 'Nanette')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + + TEST_INDEX_ACCOUNT + + " HAVING matchquery(firstname, 'Nanette')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void match_query_in_where() throws IOException { - JSONObject result = executeJdbcRequest("SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE match_query(lastname, 'Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT firstname FROM " + + TEST_INDEX_ACCOUNT + + " WHERE match_query(lastname, 'Bates')"); verifySchema(result, schema("firstname", "text")); verifyDataRows(result, rows("Nanette")); } @Test public void match_query_in_having() throws IOException { - JSONObject result = executeJdbcRequest( - "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match_query(firstname, 'Nanette')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + + TEST_INDEX_ACCOUNT + + " HAVING match_query(firstname, 'Nanette')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void match_aliases_return_the_same_results() throws IOException { - String query1 = "SELECT lastname FROM " - + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"; + 
String query1 = + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT lastname FROM " - + TEST_INDEX_ACCOUNT + " HAVING matchquery(firstname, 'Nanette')"; + String query2 = + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING matchquery(firstname, 'Nanette')"; JSONObject result2 = executeJdbcRequest(query2); - String query3 = "SELECT lastname FROM " - + TEST_INDEX_ACCOUNT + " HAVING match_query(firstname, 'Nanette')"; + String query3 = + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match_query(firstname, 'Nanette')"; JSONObject result3 = executeJdbcRequest(query3); assertEquals(result1.getInt("total"), result2.getInt("total")); assertEquals(result1.getInt("total"), result3.getInt("total")); @@ -123,30 +149,33 @@ public void match_aliases_return_the_same_results() throws IOException { @Test public void match_query_alternate_syntax() throws IOException { - JSONObject result = executeJdbcRequest( - "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " WHERE lastname = match_query('Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + + TEST_INDEX_ACCOUNT + + " WHERE lastname = match_query('Bates')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void matchquery_alternate_syntax() throws IOException { - JSONObject result = executeJdbcRequest( - "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " WHERE lastname = matchquery('Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " WHERE lastname = matchquery('Bates')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void match_alternate_syntaxes_return_the_same_results() throws IOException { - String query1 = "SELECT * FROM " - + TEST_INDEX_ACCOUNT + " WHERE match(firstname, 'Nanette')"; + String query1 = "SELECT * FROM " + TEST_INDEX_ACCOUNT + " WHERE match(firstname, 'Nanette')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT * FROM " - + TEST_INDEX_ACCOUNT + " WHERE firstname = match_query('Nanette')"; + String query2 = + "SELECT * FROM " + TEST_INDEX_ACCOUNT + " WHERE firstname = match_query('Nanette')"; JSONObject result2 = executeJdbcRequest(query2); - String query3 = "SELECT * FROM " - + TEST_INDEX_ACCOUNT + " WHERE firstname = matchquery('Nanette')"; + String query3 = + "SELECT * FROM " + TEST_INDEX_ACCOUNT + " WHERE firstname = matchquery('Nanette')"; JSONObject result3 = executeJdbcRequest(query3); assertEquals(result1.getInt("total"), result2.getInt("total")); assertEquals(result1.getInt("total"), result3.getInt("total")); @@ -154,11 +183,16 @@ public void match_alternate_syntaxes_return_the_same_results() throws IOExceptio @Test public void matchPhraseQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where address= matchPhrase('671 Bristol Street') order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\\\"match_phrase\\\":{\\\"address\\\":{\\\"query\\\":\\\"671 Bristol Street\\\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= matchPhrase('671 Bristol Street') order by" + + " _score desc limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + containsString( + 
"{\\\"match_phrase\\\":{\\\"address\\\":{\\\"query\\\":\\\"671 Bristol Street\\\"")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java index 3b7e65dcc6..d08149aa96 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_PHRASE; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java index dd2a8384d6..f181a18689 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; @@ -26,7 +25,8 @@ protected void init() throws Exception { public void required_parameters() throws IOException { String query = "SELECT Title FROM %s WHERE match_phrase_prefix(Title, 'champagne be')"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, + verifyDataRows( + result, rows("Can old flat champagne be used for vinegar?"), rows("Elder flower champagne best to use natural yeast or add a wine yeast?")); } @@ -34,9 +34,10 @@ public void required_parameters() throws IOException { @Test public void all_optional_parameters() throws IOException { // The values for optional parameters are valid but arbitrary. - String query = "SELECT Title FROM %s " + - "WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, zero_terms_query='ALL', " + - "max_expansions = 2, analyzer=standard, slop=0)"; + String query = + "SELECT Title FROM %s " + + "WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, zero_terms_query='ALL', " + + "max_expansions = 2, analyzer=standard, slop=0)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Can old flat champagne be used for vinegar?")); } @@ -47,21 +48,22 @@ public void max_expansions_is_3() throws IOException { // It tells OpenSearch to consider only the first 3 terms that start with 'bottl' // In this dataset these are 'bottle-conditioning', 'bottling', 'bottles'. - String query = "SELECT Tags FROM %s " + - "WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3)"; + String query = + "SELECT Tags FROM %s " + + "WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, rows("brewing draught bottling"), - rows("draught bottles")); + verifyDataRows(result, rows("brewing draught bottling"), rows("draught bottles")); } @Test public void analyzer_english() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // This results in an empty query. 
- String query = "SELECT Title FROM %s " + - "WHERE match_phrase_prefix(Title, 'in to', analyzer=english)"; + String query = + "SELECT Title FROM %s " + "WHERE match_phrase_prefix(Title, 'in to', analyzer=english)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - assertTrue("Expect English analyzer to filter out common words 'in' and 'to'", + assertTrue( + "Expect English analyzer to filter out common words 'in' and 'to'", result.getInt("total") == 0); } @@ -69,8 +71,8 @@ public void analyzer_english() throws IOException { public void analyzer_standard() throws IOException { // Standard analyzer does not treat 'in' and 'to' as special terms. // This results in 'to' being used as a phrase prefix given us 'Tokyo'. - String query = "SELECT Title FROM %s " + - "WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)"; + String query = + "SELECT Title FROM %s " + "WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Local microbreweries and craft beer in Tokyo")); } @@ -80,15 +82,15 @@ public void zero_term_query_all() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // zero_terms_query of 'ALL' causes all rows to be returned. // ORDER BY ... LIMIT helps make the test understandable. - String query = "SELECT Title FROM %s" + - " WHERE match_phrase_prefix(Title, 'in to', analyzer=english, zero_terms_query='ALL')" + - " ORDER BY Title DESC" + - " LIMIT 1"; + String query = + "SELECT Title FROM %s" + + " WHERE match_phrase_prefix(Title, 'in to', analyzer=english, zero_terms_query='ALL')" + + " ORDER BY Title DESC" + + " LIMIT 1"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("was working great, now all foam")); } - @Test public void slop_is_2() throws IOException { // When slop is 2, the terms are matched exactly in the order specified. @@ -103,8 +105,6 @@ public void slop_is_3() throws IOException { // When slop is 3, results will include phrases where the query terms are transposed. 
String query = "SELECT Tags from %s where match_phrase_prefix(Tags, 'gas ta', slop=3)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, - rows("taste draught gas"), - rows("taste gas")); + verifyDataRows(result, rows("taste draught gas"), rows("taste gas")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java index cbb39ead40..60b7632ad0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -34,9 +33,8 @@ public void init() throws Exception { @Test public void testPI() throws IOException { JSONObject result = - executeQuery(String.format("SELECT PI() FROM %s HAVING (COUNT(1) > 0)",TEST_INDEX_BANK) ); - verifySchema(result, - schema("PI()", null, "double")); + executeQuery(String.format("SELECT PI() FROM %s HAVING (COUNT(1) > 0)", TEST_INDEX_BANK)); + verifySchema(result, schema("PI()", null, "double")); verifyDataRows(result, rows(3.141592653589793)); } @@ -97,7 +95,8 @@ public void testE() throws IOException { @Test public void testExpm1() throws IOException { - JSONObject result = executeQuery("select expm1(account_number) FROM " + TEST_INDEX_BANK + " LIMIT 2"); + JSONObject result = + executeQuery("select expm1(account_number) FROM " + TEST_INDEX_BANK + " LIMIT 2"); verifySchema(result, schema("expm1(account_number)", null, "double")); verifyDataRows(result, rows(Math.expm1(1)), rows(Math.expm1(6))); } @@ -333,36 +332,28 @@ public void testCbrt() throws IOException { @Test public void testLnReturnsNull() throws IOException { JSONObject result = executeQuery("select ln(0), ln(-2)"); - verifySchema(result, - schema("ln(0)", "double"), - schema("ln(-2)", "double")); + verifySchema(result, schema("ln(0)", "double"), schema("ln(-2)", "double")); verifyDataRows(result, rows(null, null)); } @Test public void testLogReturnsNull() throws IOException { JSONObject result = executeQuery("select log(0), log(-2)"); - verifySchema(result, - schema("log(0)", "double"), - schema("log(-2)", "double")); + verifySchema(result, schema("log(0)", "double"), schema("log(-2)", "double")); verifyDataRows(result, rows(null, null)); } @Test public void testLog10ReturnsNull() throws IOException { JSONObject result = executeQuery("select log10(0), log10(-2)"); - verifySchema(result, - schema("log10(0)", "double"), - schema("log10(-2)", "double")); + verifySchema(result, schema("log10(0)", "double"), schema("log10(-2)", "double")); verifyDataRows(result, rows(null, null)); } @Test public void testLog2ReturnsNull() throws IOException { JSONObject result = executeQuery("select log2(0), log2(-2)"); - verifySchema(result, - schema("log2(0)", "double"), - schema("log2(-2)", "double")); + verifySchema(result, schema("log2(0)", "double"), schema("log2(-2)", "double")); verifyDataRows(result, rows(null, null)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java index 2a26eb19fe..4bbab4f167 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 
*/ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -40,9 +39,7 @@ public void requestCount() throws IOException, InterruptedException { } private Request makeStatRequest() { - return new Request( - "GET", STATS_API_ENDPOINT - ); + return new Request("GET", STATS_API_ENDPOINT); } private int requestTotal() throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java index 6ef9846557..0bc091b0d2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java @@ -30,96 +30,111 @@ public void init() throws IOException { @Test public void test_mandatory_params() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; JSONObject result = executeJdbcRequest(query); assertEquals(16, result.getInt("total")); } @Test public void test_all_params() { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['Body', Tags], 'taste beer', operator='and', analyzer=english," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, cutoff_frequency=0.33," - + "fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient = true, max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', prefix_length = 7, tie_breaker = 0.3," - + "type = most_fields, slop = 2, zero_terms_query = 'ALL');"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match(['Body', Tags], 'taste beer', operator='and'," + + " analyzer=english,auto_generate_synonyms_phrase_query=true, boost = 0.77," + + " cutoff_frequency=0.33,fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient" + + " = true, max_expansions = 25,minimum_should_match = '2<-25% 9<-3', prefix_length =" + + " 7, tie_breaker = 0.3,type = most_fields, slop = 2, zero_terms_query = 'ALL');"; JSONObject result = executeJdbcRequest(query); assertEquals(10, result.getInt("total")); } @Test public void verify_wildcard_test() { - String query1 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['Tags'], 'taste')"; + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['Tags'], 'taste')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['T*'], 'taste')"; + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['T*'], 'taste')"; JSONObject result2 = executeJdbcRequest(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['*Date'], '2014-01-22');"; + String query = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['*Date'], '2014-01-22');"; JSONObject result = executeJdbcRequest(query); assertEquals(10, result.getInt("total")); } @Test public void test_multimatch_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multimatch('query'='taste', 'fields'='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multimatch('query'='taste', 'fields'='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(8, result.getInt("total")); } 
@Test public void test_multimatchquery_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multimatchquery(query='cicerone', fields='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multimatchquery(query='cicerone', fields='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("serving cicerone restaurants"), - rows("taste cicerone")); + verifyDataRows(result, rows("serving cicerone restaurants"), rows("taste cicerone")); } @Test public void test_quoted_multi_match_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multi_match('query'='cicerone', 'fields'='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multi_match('query'='cicerone', 'fields'='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("serving cicerone restaurants"), - rows("taste cicerone")); + verifyDataRows(result, rows("serving cicerone restaurants"), rows("taste cicerone")); } @Test public void test_multi_match_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multi_match(query='cicerone', fields='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multi_match(query='cicerone', fields='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("serving cicerone restaurants"), - rows("taste cicerone")); + verifyDataRows(result, rows("serving cicerone restaurants"), rows("taste cicerone")); } @Test public void test_wildcard_multi_match_alternate_parameter_syntax() { - String query = "SELECT Body FROM " + TEST_INDEX_BEER - + " WHERE multi_match(query='IPA', fields='B*') LIMIT 1"; + String query = + "SELECT Body FROM " + + TEST_INDEX_BEER + + " WHERE multi_match(query='IPA', fields='B*') LIMIT 1"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, rows("
<p>I know what makes an IPA an IPA, but what are the unique" + - " characteristics of it's common variants? To be specific, the ones I'm interested in are Double IPA" + - " and Black IPA, but general differences between any other styles would be welcome too.</p>
\n")); + verifyDataRows( + result, + rows( + "<p>I know what makes an IPA an IPA, but what are the unique characteristics of it's" + + " common variants? To be specific, the ones I'm interested in are Double IPA and" + + " Black IPA, but general differences between any other styles would be welcome" + + " too.</p>
\n")); } @Test public void test_all_params_multimatchquery_alternate_parameter_syntax() { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multimatchquery(query='cicerone', fields='Tags', 'operator'='or', analyzer=english," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, cutoff_frequency=0.33," - + "fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient = true, max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', prefix_length = 7, tie_breaker = 0.3," - + "type = most_fields, slop = 2, zero_terms_query = 'ALL');"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multimatchquery(query='cicerone', fields='Tags', 'operator'='or'," + + " analyzer=english,auto_generate_synonyms_phrase_query=true, boost = 0.77," + + " cutoff_frequency=0.33,fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient" + + " = true, max_expansions = 25,minimum_should_match = '2<-25% 9<-3', prefix_length =" + + " 7, tie_breaker = 0.3,type = most_fields, slop = 2, zero_terms_query = 'ALL');"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); @@ -127,28 +142,28 @@ public void test_all_params_multimatchquery_alternate_parameter_syntax() { @Test public void multi_match_alternate_syntax() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multi_match('2014-01-22');"; + String query = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multi_match('2014-01-22');"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(8, result.getInt("total")); } @Test public void multimatch_alternate_syntax() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multimatch('2014-01-22');"; + String query = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multimatch('2014-01-22');"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(8, result.getInt("total")); } @Test public void multi_match_alternate_syntaxes_return_the_same_results() throws IOException { - String query1 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['CreationDate'], '2014-01-22');"; - String query2 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multi_match('2014-01-22');"; - String query3 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multimatch('2014-01-22');"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['CreationDate'], '2014-01-22');"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multi_match('2014-01-22');"; + String query3 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multimatch('2014-01-22');"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); var result2 = new JSONObject(executeQuery(query2, "jdbc")); var result3 = new JSONObject(executeQuery(query3, "jdbc")); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java index d3230188b7..54831cb561 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java @@ -37,11 +37,13 @@ public void init() throws IOException { @Test public void nested_function_with_array_of_nested_field_test() { - String query = "SELECT nested(message.info), nested(comment.data) FROM " + TEST_INDEX_NESTED_TYPE; + String query = + "SELECT nested(message.info), 
nested(comment.data) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("c", "ab"), rows("a", "ab"), rows("b", "aa"), @@ -52,17 +54,20 @@ public void nested_function_with_array_of_nested_field_test() { @Test public void nested_function_in_select_test() { - String query = "SELECT nested(message.info), nested(comment.data), " - + "nested(message.dayOfWeek) FROM " - + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + String query = + "SELECT nested(message.info), nested(comment.data), " + + "nested(message.dayOfWeek) FROM " + + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; JSONObject result = executeJdbcRequest(query); assertEquals(5, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.info)", null, "keyword"), schema("nested(comment.data)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", "ab", 1), rows("b", "aa", 2), rows("c", "aa", 1), @@ -74,8 +79,8 @@ public void nested_function_in_select_test() { // gets resolved @Disabled // TODO fix me when aggregation is supported public void nested_function_in_an_aggregate_function_in_select_test() { - String query = "SELECT sum(nested(message.dayOfWeek)) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + String query = + "SELECT sum(nested(message.dayOfWeek)) FROM " + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows(14)); } @@ -83,84 +88,67 @@ public void nested_function_in_an_aggregate_function_in_select_test() { // TODO Enable me when nested aggregation is supported @Disabled public void nested_function_with_arrays_in_an_aggregate_function_in_select_test() { - String query = "SELECT sum(nested(message.dayOfWeek)) FROM " + - TEST_INDEX_NESTED_TYPE; + String query = "SELECT sum(nested(message.dayOfWeek)) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows(19)); } @Test public void nested_function_in_a_function_in_select_test() { - String query = "SELECT upper(nested(message.info)) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + String query = + "SELECT upper(nested(message.info)) FROM " + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("A"), - rows("B"), - rows("C"), - rows("C"), - rows("ZZ")); + verifyDataRows(result, rows("A"), rows("B"), rows("C"), rows("C"), rows("ZZ")); } @Test public void nested_all_function_in_a_function_in_select_test() { - String query = "SELECT nested(message.*) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + " WHERE nested(message.info) = 'a'"; + String query = + "SELECT nested(message.*) FROM " + + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + + " WHERE nested(message.info) = 'a'"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("e", 1, "a")); } @Test public void invalid_multiple_nested_all_function_in_a_function_in_select_test() { - String query = "SELECT nested(message.*), nested(message.info) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; - RuntimeException result = assertThrows( - RuntimeException.class, - () -> executeJdbcRequest(query) - ); + String query = + "SELECT nested(message.*), nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + RuntimeException result = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); 
assertTrue( result.getMessage().contains("IllegalArgumentException") - && result.getMessage().contains("Multiple entries with same key") - ); + && result.getMessage().contains("Multiple entries with same key")); } @Test public void nested_all_function_with_limit_test() { - String query = "SELECT nested(message.*) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + " LIMIT 3"; + String query = + "SELECT nested(message.*) FROM " + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + " LIMIT 3"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("e", 1, "a"), - rows("f", 2, "b"), - rows("g", 1, "c") - ); + verifyDataRows(result, rows("e", 1, "a"), rows("f", 2, "b"), rows("g", 1, "c")); } - @Test public void nested_function_with_array_of_multi_nested_field_test() { String query = "SELECT nested(message.author.name) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("e"), - rows("f"), - rows("g"), - rows("h"), - rows("p"), - rows("yy")); + verifyDataRows(result, rows("e"), rows("f"), rows("g"), rows("h"), rows("p"), rows("yy")); } @Test public void nested_function_with_null_and_missing_fields_test() { - String query = "SELECT nested(message.info), nested(comment.data) FROM " - + TEST_INDEX_NESTED_WITH_NULLS; + String query = + "SELECT nested(message.info), nested(comment.data) FROM " + TEST_INDEX_NESTED_WITH_NULLS; JSONObject result = executeJdbcRequest(query); assertEquals(10, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows(null, "hh"), rows("b", "aa"), rows("c", "aa"), @@ -176,12 +164,14 @@ public void nested_function_with_null_and_missing_fields_test() { @Test public void nested_function_multiple_fields_with_matched_and_mismatched_paths_test() { String query = - "SELECT nested(message.author), nested(message.dayOfWeek), nested(message.info), nested(comment.data), " - + "nested(comment.likes) FROM " + TEST_INDEX_NESTED_TYPE; + "SELECT nested(message.author), nested(message.dayOfWeek), nested(message.info)," + + " nested(comment.data), nested(comment.likes) FROM " + + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", "ab", 3), rows("f", 2, "b", "aa", 2), rows("g", 1, "c", "aa", 3), @@ -192,12 +182,12 @@ public void nested_function_multiple_fields_with_matched_and_mismatched_paths_te @Test public void nested_function_mixed_with_non_nested_type_test() { - String query = - "SELECT nested(message.info), someField FROM " + TEST_INDEX_NESTED_TYPE; + String query = "SELECT nested(message.info), someField FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", "b"), rows("b", "a"), rows("c", "a"), @@ -209,46 +199,38 @@ public void nested_function_mixed_with_non_nested_type_test() { @Test public void nested_function_with_order_by_clause() { String query = - "SELECT nested(message.info) FROM " + TEST_INDEX_NESTED_TYPE + "SELECT nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.info)"; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("a"), - rows("c"), - rows("a"), - rows("b"), - rows("c"), - rows("zz")); + verifyDataRows(result, rows("a"), rows("c"), rows("a"), rows("b"), rows("c"), 
rows("zz")); } @Test public void nested_function_with_order_by_clause_desc() { String query = - "SELECT nested(message.info) FROM " + TEST_INDEX_NESTED_TYPE + "SELECT nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.info, message) DESC"; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("zz"), - rows("c"), - rows("c"), - rows("a"), - rows("b"), - rows("a")); + verifyDataRows(result, rows("zz"), rows("c"), rows("c"), rows("a"), rows("b"), rows("a")); } @Test public void nested_function_and_field_with_order_by_clause() { String query = - "SELECT nested(message.info), myNum FROM " + TEST_INDEX_NESTED_TYPE + "SELECT nested(message.info), myNum FROM " + + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.info, message), myNum"; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", 1), rows("c", 4), rows("a", 4), @@ -266,9 +248,12 @@ public void nested_function_with_group_by_clause() { "SELECT count(*) FROM " + TEST_INDEX_NESTED_TYPE + " GROUP BY nested(message.info)"; JSONObject result = executeJdbcRequest(query); - assertTrue(result.getJSONObject("error").get("details").toString().contains( - "Aggregation type nested is not yet implemented" - )); + assertTrue( + result + .getJSONObject("error") + .get("details") + .toString() + .contains("Aggregation type nested is not yet implemented")); } // Nested function in HAVING clause is not yet implemented for JDBC format. This test ensures @@ -277,12 +262,19 @@ public void nested_function_with_group_by_clause() { @Test public void nested_function_with_having_clause() { String query = - "SELECT count(*) FROM " + TEST_INDEX_NESTED_TYPE + " GROUP BY myNum HAVING nested(comment.likes) > 7"; + "SELECT count(*) FROM " + + TEST_INDEX_NESTED_TYPE + + " GROUP BY myNum HAVING nested(comment.likes) > 7"; JSONObject result = executeJdbcRequest(query); - assertTrue(result.getJSONObject("error").get("details").toString().contains( - "For more details, please send request for Json format to see the raw response from OpenSearch engine." 
- )); + assertTrue( + result + .getJSONObject("error") + .get("details") + .toString() + .contains( + "For more details, please send request for Json format to see the raw response from" + + " OpenSearch engine.")); } @Test @@ -292,13 +284,11 @@ public void nested_function_mixed_with_non_nested_types_test() { JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("a", - new JSONObject(Map.of("south", 3, "west", "ab")), "ab"), - rows("b", - new JSONObject(Map.of("south", 5, "west", "ff")), "ff"), - rows("c", - new JSONObject(Map.of("south", 3, "west", "ll")), "ll"), + verifyDataRows( + result, + rows("a", new JSONObject(Map.of("south", 3, "west", "ab")), "ab"), + rows("b", new JSONObject(Map.of("south", 5, "west", "ff")), "ff"), + rows("c", new JSONObject(Map.of("south", 3, "west", "ll")), "ll"), rows("d", null, null), rows("i", null, null), rows("zz", null, null)); @@ -308,11 +298,13 @@ public void nested_function_mixed_with_non_nested_types_test() { public void nested_function_with_relevance_query() { String query = "SELECT nested(message.info), highlight(someField) FROM " - + TEST_INDEX_NESTED_TYPE + " WHERE match(someField, 'b')"; + + TEST_INDEX_NESTED_TYPE + + " WHERE match(someField, 'b')"; JSONObject result = executeJdbcRequest(query); assertEquals(3, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", new JSONArray(List.of("b"))), rows("c", new JSONArray(List.of("b"))), rows("a", new JSONArray(List.of("b")))); @@ -322,60 +314,68 @@ public void nested_function_with_relevance_query() { public void nested_with_non_nested_type_test() { String query = "SELECT nested(someField) FROM " + TEST_INDEX_NESTED_TYPE; - Exception exception = assertThrows(RuntimeException.class, - () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage().contains( - "{\n" + - " \"error\": {\n" + - " \"reason\": \"Invalid SQL query\",\n" + - " \"details\": \"Illegal nested field name: someField\",\n" + - " \"type\": \"IllegalArgumentException\"\n" + - " },\n" + - " \"status\": 400\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + assertTrue( + exception + .getMessage() + .contains( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid SQL query\",\n" + + " \"details\": \"Illegal nested field name: someField\",\n" + + " \"type\": \"IllegalArgumentException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}")); } @Test public void nested_missing_path() { String query = "SELECT nested(message.invalid) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; - Exception exception = assertThrows(RuntimeException.class, - () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage().contains("" + - "{\n" + - " \"error\": {\n" + - " \"reason\": \"Invalid SQL query\",\n" + - " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME, name=message.invalid) in type env\",\n" + - " \"type\": \"SemanticCheckException\"\n" + - " },\n" + - " \"status\": 400\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + assertTrue( + exception + .getMessage() + .contains( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid SQL query\",\n" + + " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME," + + " name=message.invalid) in type env\",\n" + + " \"type\": \"SemanticCheckException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}")); } @Test public void nested_missing_path_argument() { - String query = 
"SELECT nested(message.author.name, invalid) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; + String query = + "SELECT nested(message.author.name, invalid) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; - Exception exception = assertThrows(RuntimeException.class, - () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage().contains("" + - "{\n" + - " \"error\": {\n" + - " \"reason\": \"Invalid SQL query\",\n" + - " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env\",\n" + - " \"type\": \"SemanticCheckException\"\n" + - " },\n" + - " \"status\": 400\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + assertTrue( + exception + .getMessage() + .contains( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid SQL query\",\n" + + " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME, name=invalid)" + + " in type env\",\n" + + " \"type\": \"SemanticCheckException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}")); } @Test public void test_nested_where_with_and_conditional() { - String query = "SELECT nested(message.info), nested(message.author) FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message, message.info = 'a' AND message.author = 'e')"; + String query = + "SELECT nested(message.info), nested(message.author) FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message, message.info = 'a' AND message.author = 'e')"; JSONObject result = executeJdbcRequest(query); assertEquals(1, result.getInt("total")); verifyDataRows(result, rows("a", "e")); @@ -383,22 +383,19 @@ public void test_nested_where_with_and_conditional() { @Test public void test_nested_in_select_and_where_as_predicate_expression() { - String query = "SELECT nested(message.info) FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) = 'a'"; + String query = + "SELECT nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message.info) = 'a'"; JSONObject result = executeJdbcRequest(query); assertEquals(3, result.getInt("total")); - verifyDataRows( - result, - rows("a"), - rows("c"), - rows("a") - ); + verifyDataRows(result, rows("a"), rows("c"), rows("a")); } @Test public void test_nested_in_where_as_predicate_expression() { - String query = "SELECT message.info FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) = 'a'"; + String query = + "SELECT message.info FROM " + TEST_INDEX_NESTED_TYPE + " WHERE nested(message.info) = 'a'"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); // Only first index of array is returned. Second index has 'a' @@ -407,8 +404,10 @@ public void test_nested_in_where_as_predicate_expression() { @Test public void test_nested_in_where_as_predicate_expression_with_like() { - String query = "SELECT message.info FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) LIKE 'a'"; + String query = + "SELECT message.info FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message.info) LIKE 'a'"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); // Only first index of array is returned. 
Second index has 'a' @@ -417,21 +416,22 @@ public void test_nested_in_where_as_predicate_expression_with_like() { @Test public void test_nested_in_where_as_predicate_expression_with_multiple_conditions() { - String query = "SELECT message.info, comment.data, message.dayOfWeek FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) = 'zz' OR nested(comment.data) = 'ab' AND nested(message.dayOfWeek) >= 4"; + String query = + "SELECT message.info, comment.data, message.dayOfWeek FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message.info) = 'zz' OR nested(comment.data) = 'ab' AND" + + " nested(message.dayOfWeek) >= 4"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows( - result, - rows("c", "ab", 4), - rows("zz", "aa", 6) - ); + verifyDataRows(result, rows("c", "ab", 4), rows("zz", "aa", 6)); } @Test public void test_nested_in_where_as_predicate_expression_with_relevance_query() { - String query = "SELECT comment.likes, someField FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(comment.likes) = 10 AND match(someField, 'a')"; + String query = + "SELECT comment.likes, someField FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(comment.likes) = 10 AND match(someField, 'a')"; JSONObject result = executeJdbcRequest(query); assertEquals(1, result.getInt("total")); verifyDataRows(result, rows(10, "a")); @@ -443,11 +443,13 @@ public void nested_function_all_subfields() { JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a"), rows("f", 2, "b"), rows("g", 1, "c"), @@ -458,17 +460,18 @@ public void nested_function_all_subfields() { @Test public void nested_function_all_subfields_and_specified_subfield() { - String query = "SELECT nested(message.*), nested(comment.data) FROM " - + TEST_INDEX_NESTED_TYPE; + String query = "SELECT nested(message.*), nested(comment.data) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword"), schema("nested(comment.data)", null, "keyword")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", "ab"), rows("f", 2, "b", "aa"), rows("g", 1, "c", "aa"), @@ -479,15 +482,16 @@ public void nested_function_all_subfields_and_specified_subfield() { @Test public void nested_function_all_deep_nested_subfields() { - String query = "SELECT nested(message.author.address.*) FROM " - + TEST_INDEX_MULTI_NESTED_TYPE; + String query = "SELECT nested(message.author.address.*) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author.address.number)", null, "integer"), schema("nested(message.author.address.street)", null, "keyword")); - verifyDataRows(result, + verifyDataRows( + result, rows(1, "bc"), rows(2, "ab"), rows(3, "sk"), @@ -498,18 +502,19 @@ public void nested_function_all_deep_nested_subfields() { @Test public void nested_function_all_subfields_for_two_nested_fields() { - String query = 
"SELECT nested(message.*), nested(comment.*) FROM " - + TEST_INDEX_NESTED_TYPE; + String query = "SELECT nested(message.*), nested(comment.*) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword"), schema("nested(comment.data)", null, "keyword"), schema("nested(comment.likes)", null, "long")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", "ab", 3), rows("f", 2, "b", "aa", 2), rows("g", 1, "c", "aa", 3), @@ -524,12 +529,14 @@ public void nested_function_all_subfields_and_non_nested_field() { JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword"), schema("myNum", null, "long")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", 1), rows("f", 2, "b", 2), rows("g", 1, "c", 3), @@ -544,17 +551,15 @@ public void nested_function_with_date_types_as_object_arrays_within_arrays_test( JSONObject result = executeJdbcRequest(query); assertEquals(11, result.getInt("total")); - verifySchema(result, - schema("nested(address.moveInDate)", null, "object") - ); - verifyDataRows(result, - rows(new JSONObject(Map.of("dateAndTime","1984-04-12 09:07:42"))), - rows(new JSONArray( - List.of( - Map.of("dateAndTime", "2023-05-03 08:07:42"), - Map.of("dateAndTime", "2001-11-11 04:07:44")) - ) - ), + verifySchema(result, schema("nested(address.moveInDate)", null, "object")); + verifyDataRows( + result, + rows(new JSONObject(Map.of("dateAndTime", "1984-04-12 09:07:42"))), + rows( + new JSONArray( + List.of( + Map.of("dateAndTime", "2023-05-03 08:07:42"), + Map.of("dateAndTime", "2001-11-11 04:07:44")))), rows(new JSONObject(Map.of("dateAndTime", "1966-03-19 03:04:55"))), rows(new JSONObject(Map.of("dateAndTime", "2011-06-01 01:01:42"))), rows(new JSONObject(Map.of("dateAndTime", "1901-08-11 04:03:33"))), @@ -563,30 +568,27 @@ public void nested_function_with_date_types_as_object_arrays_within_arrays_test( rows(new JSONObject(Map.of("dateAndTime", "1977-07-13 09:04:41"))), rows(new JSONObject(Map.of("dateAndTime", "1933-12-12 05:05:45"))), rows(new JSONObject(Map.of("dateAndTime", "1909-06-17 01:04:21"))), - rows(new JSONArray( - List.of( - Map.of("dateAndTime", "2001-11-11 04:07:44")) - ) - ) - ); + rows(new JSONArray(List.of(Map.of("dateAndTime", "2001-11-11 04:07:44"))))); } @Test public void nested_function_all_subfields_in_wrong_clause() { String query = "SELECT * FROM " + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.*)"; - Exception exception = assertThrows(RuntimeException.class, () -> - executeJdbcRequest(query)); - - assertTrue(exception.getMessage().contains("" + - "{\n" + - " \"error\": {\n" + - " \"reason\": \"There was internal problem at backend\",\n" + - " \"details\": \"Invalid use of expression nested(message.*)\",\n" + - " \"type\": \"UnsupportedOperationException\"\n" + - " },\n" + - " \"status\": 503\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + + assertTrue( + exception + .getMessage() + .contains( + "" + + "{\n" + + " \"error\": {\n" + + " \"reason\": \"There was internal problem at 
backend\",\n" + + " \"details\": \"Invalid use of expression nested(message.*)\",\n" + + " \"type\": \"UnsupportedOperationException\"\n" + + " },\n" + + " \"status\": 503\n" + + "}")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java index de3dd0fe98..547c88859e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java @@ -77,8 +77,7 @@ public NowLikeFunctionIT( @Name("constValue") Boolean constValue, @Name("referenceGetter") Supplier referenceGetter, @Name("parser") BiFunction parser, - @Name("serializationPatternStr") String serializationPatternStr - ) { + @Name("serializationPatternStr") String serializationPatternStr) { this.name = name; this.hasFsp = hasFsp; this.hasShortcut = hasShortcut; @@ -90,56 +89,104 @@ public NowLikeFunctionIT( @ParametersFactory(argumentFormatting = "%1$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("now", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("current_timestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtimestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtime", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("sysdate", true, false, false, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("curtime", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("current_time", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("curdate", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("current_date", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_date", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalDate()), - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_time", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalTime()), - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("utc_timestamp", false, false, true, - (Supplier) (NowLikeFunctionIT::utcDateTimeNow), - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss") - )); + return Arrays.asList( + $$( + $( + "now", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "current_timestamp", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtimestamp", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtime", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "sysdate", + true, + false, + false, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "curtime", + false, + false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "current_time", + false, 
+ false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "curdate", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "current_date", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_date", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalDate()), + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_time", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalTime()), + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "utc_timestamp", + false, + false, + true, + (Supplier) (NowLikeFunctionIT::utcDateTimeNow), + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"))); } private long getDiff(Temporal sample, Temporal reference) { @@ -150,14 +197,14 @@ private long getDiff(Temporal sample, Temporal reference) { } public static LocalDateTime utcDateTimeNow() { - ZonedDateTime zonedDateTime = - LocalDateTime.now().atZone(TimeZone.getDefault().toZoneId()); + ZonedDateTime zonedDateTime = LocalDateTime.now().atZone(TimeZone.getDefault().toZoneId()); return zonedDateTime.withZoneSameInstant(ZoneId.of("UTC")).toLocalDateTime(); } @Test public void testNowLikeFunctions() throws IOException { - var serializationPattern = new DateTimeFormatterBuilder() + var serializationPattern = + new DateTimeFormatterBuilder() .appendPattern(serializationPatternStr) .optionalStart() .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true) @@ -167,15 +214,16 @@ public void testNowLikeFunctions() throws IOException { double delta = 2d; // acceptable time diff, secs if (reference instanceof LocalDate) delta = 1d; // Max date delta could be 1 if test runs on the very edge of two days - // We ignore probability of a test run on edge of month or year to simplify the checks + // We ignore probability of a test run on edge of month or year to simplify the checks - var calls = new ArrayList() {{ - add(name + "()"); - }}; - if (hasShortcut) - calls.add(name); - if (hasFsp) - calls.add(name + "(0)"); + var calls = + new ArrayList() { + { + add(name + "()"); + } + }; + if (hasShortcut) calls.add(name); + if (hasFsp) calls.add(name + "(0)"); // Column order is: func(), func, func(0) // shortcut ^ fsp ^ @@ -185,20 +233,25 @@ public void testNowLikeFunctions() throws IOException { JSONArray firstRow = rows.getJSONArray(0); for (int i = 0; i < rows.length(); i++) { var row = rows.getJSONArray(i); - if (constValue) - assertTrue(firstRow.similar(row)); + if (constValue) assertTrue(firstRow.similar(row)); int column = 0; - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); if (hasShortcut) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); } if (hasFsp) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column), serializationPattern)), + delta); } } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java index b8bf0963b5..f885b6d4e0 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -34,28 +33,22 @@ public void testNullLiteralSchema() { @Test public void testNullLiteralInOperator() { - verifyDataRows( - query("SELECT NULL = NULL, NULL AND TRUE"), - rows(null, null)); + verifyDataRows(query("SELECT NULL = NULL, NULL AND TRUE"), rows(null, null)); } @Test public void testNullLiteralInFunction() { - verifyDataRows( - query("SELECT ABS(NULL), POW(2, FLOOR(NULL))"), - rows(null, null)); + verifyDataRows(query("SELECT ABS(NULL), POW(2, FLOOR(NULL))"), rows(null, null)); } @Test public void testNullLiteralInInterval() { verifyDataRows( query("SELECT INTERVAL NULL DAY, INTERVAL 60 * 60 * 24 * (NULL - FLOOR(NULL)) SECOND"), - rows(null, null) - ); + rows(null, null)); } private JSONObject query(String sql) { return new JSONObject(executeQuery(sql, "jdbc")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java index e6f4e18468..84289d8f57 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import com.carrotsearch.randomizedtesting.annotations.Name; @@ -27,8 +26,7 @@ public class PaginationBlackboxIT extends SQLIntegTestCase { private final Index index; private final Integer pageSize; - public PaginationBlackboxIT(@Name("index") Index index, - @Name("pageSize") Integer pageSize) { + public PaginationBlackboxIT(@Name("index") Index index, @Name("pageSize") Integer pageSize) { this.index = index; this.pageSize = pageSize; } @@ -45,7 +43,7 @@ public static Iterable compareTwoDates() { var testData = new ArrayList(); for (var index : indices) { for (var pageSize : pageSizes) { - testData.add(new Object[] { index, pageSize }); + testData.add(new Object[] {index, pageSize}); } } return testData; @@ -64,14 +62,19 @@ public void test_pagination_blackbox() { var responseCounter = 1; this.logger.info(testReportPrefix + "first response"); - response = new JSONObject(executeFetchQuery( - String.format("select * from %s", index.getName()), pageSize, "jdbc")); + response = + new JSONObject( + executeFetchQuery( + String.format("select * from %s", index.getName()), pageSize, "jdbc")); - var cursor = response.has("cursor")? response.getString("cursor") : ""; + var cursor = response.has("cursor") ? 
response.getString("cursor") : ""; do { - this.logger.info(testReportPrefix - + String.format("subsequent response %d/%d", responseCounter++, (indexSize / pageSize) + 1)); - assertTrue("Paged response schema doesn't match to non-paged", + this.logger.info( + testReportPrefix + + String.format( + "subsequent response %d/%d", responseCounter++, (indexSize / pageSize) + 1)); + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); rowsReturned += response.getInt("size"); @@ -88,13 +91,17 @@ public void test_pagination_blackbox() { cursor = ""; } - } while(!cursor.isEmpty()); - assertTrue("Paged response schema doesn't match to non-paged", + } while (!cursor.isEmpty()); + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); - assertEquals(testReportPrefix + "Paged responses return another row count that non-paged", - indexSize, rowsReturned); - assertTrue(testReportPrefix + "Paged accumulated result has other rows than non-paged", + assertEquals( + testReportPrefix + "Paged responses return another row count that non-paged", + indexSize, + rowsReturned); + assertTrue( + testReportPrefix + "Paged accumulated result has other rows than non-paged", rows.similar(rowsPaged)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java index 213c9322e1..dfb0bb2080 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java @@ -36,23 +36,24 @@ public void testSelectAll() throws IOException { @Test public void testSelectWithOpenSearchFuncInFilter() throws IOException { - var response = executeQueryTemplate( - "SELECT * FROM %s WHERE `11` = match_phrase('96')", TEST_INDEX_ONLINE); + var response = + executeQueryTemplate("SELECT * FROM %s WHERE `11` = match_phrase('96')", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @Test public void testSelectWithHighlight() throws IOException { - var response = executeQueryTemplate( - "SELECT highlight(`11`) FROM %s WHERE match_query(`11`, '96')", TEST_INDEX_ONLINE); + var response = + executeQueryTemplate( + "SELECT highlight(`11`) FROM %s WHERE match_query(`11`, '96')", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @Test public void testSelectWithFullTextSearch() throws IOException { - var response = executeQueryTemplate( - "SELECT * FROM %s WHERE match_phrase(`11`, '96')", TEST_INDEX_ONLINE); + var response = + executeQueryTemplate("SELECT * FROM %s WHERE match_phrase(`11`, '96')", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @@ -64,8 +65,7 @@ public void testSelectFromIndexWildcard() throws IOException { @Test public void testSelectFromDataSource() throws IOException { - var response = executeQueryTemplate("SELECT * FROM @opensearch.%s", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM @opensearch.%s", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @@ -77,31 +77,29 @@ public void testSelectColumnReference() throws IOException { @Test public void testSubquery() throws IOException { - var response = executeQueryTemplate("SELECT `107` from (SELECT * FROM %s)", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT `107` from (SELECT * FROM %s)", TEST_INDEX_ONLINE); verifyIsV1Cursor(response); } @Test public void testSelectExpression() throws IOException { - var response = 
executeQueryTemplate("SELECT 1 + 1 - `107` from %s", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT 1 + 1 - `107` from %s", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @Test public void testGroupBy() throws IOException { // GROUP BY is not paged by either engine. - var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107`", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107`", TEST_INDEX_ONLINE); TestUtils.verifyNoCursor(response); } @Test public void testGroupByHaving() throws IOException { // GROUP BY is not paged by either engine. - var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107` HAVING `107` > 400", - TEST_INDEX_ONLINE); + var response = + executeQueryTemplate( + "SELECT * FROM %s GROUP BY `107` HAVING `107` > 400", TEST_INDEX_ONLINE); TestUtils.verifyNoCursor(response); } @@ -113,15 +111,13 @@ public void testLimit() throws IOException { @Test public void testLimitOffset() throws IOException { - var response = executeQueryTemplate("SELECT * FROM %s LIMIT 8 OFFSET 4", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM %s LIMIT 8 OFFSET 4", TEST_INDEX_ONLINE); verifyIsV1Cursor(response); } @Test public void testOrderBy() throws IOException { - var response = executeQueryTemplate("SELECT * FROM %s ORDER By `107`", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM %s ORDER By `107`", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java index 6ebc05efad..038596cf57 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import com.carrotsearch.randomizedtesting.annotations.Name; @@ -22,38 +21,48 @@ import org.opensearch.sql.legacy.TestsConstants; /** - * Test pagination with `WHERE` clause using a parametrized test. - * See constructor {@link #PaginationFilterIT} for list of parameters - * and {@link #generateParameters} and {@link #STATEMENT_TO_NUM_OF_PAGES} - * to see how these parameters are generated. + * Test pagination with `WHERE` clause using a parametrized test. See constructor {@link + * #PaginationFilterIT} for list of parameters and {@link #generateParameters} and {@link + * #STATEMENT_TO_NUM_OF_PAGES} to see how these parameters are generated. 
*/ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) public class PaginationFilterIT extends SQLIntegTestCase { /** - * Map of the OS-SQL statement sent to SQL-plugin, and the total number - * of expected hits (on all pages) from the filtered result + * Map of the OS-SQL statement sent to SQL-plugin, and the total number of expected hits (on all + * pages) from the filtered result */ - final private static Map STATEMENT_TO_NUM_OF_PAGES = Map.of( - "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT, 1000, - "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street')", 385, - "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street') AND match(city, 'Ola')", 1, - "SELECT firstname, lastname, highlight(address) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street') AND match(state, 'OH')", 5, - "SELECT firstname, lastname, highlight('*') FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street') AND match(state, 'OH')", 5, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE true", 60, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id=10", 1, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id + 5=15", 1, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BANK, 7 - ); + private static final Map STATEMENT_TO_NUM_OF_PAGES = + Map.of( + "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT, 1000, + "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street')", + 385, + "SELECT * FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE match(address, 'street') AND match(city, 'Ola')", + 1, + "SELECT firstname, lastname, highlight(address) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE match(address, 'street') AND match(state, 'OH')", + 5, + "SELECT firstname, lastname, highlight('*') FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE match(address, 'street') AND match(state, 'OH')", + 5, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE true", 60, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id=10", 1, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id + 5=15", 1, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BANK, 7); private final String sqlStatement; private final Integer totalHits; private final Integer pageSize; - public PaginationFilterIT(@Name("statement") String sqlStatement, - @Name("total_hits") Integer totalHits, - @Name("page_size") Integer pageSize) { + public PaginationFilterIT( + @Name("statement") String sqlStatement, + @Name("total_hits") Integer totalHits, + @Name("page_size") Integer pageSize) { this.sqlStatement = sqlStatement; this.totalHits = totalHits; this.pageSize = pageSize; @@ -72,18 +81,18 @@ public static Iterable generateParameters() { List pageSizes = List.of(5, 1000); List testData = new ArrayList(); - STATEMENT_TO_NUM_OF_PAGES.forEach((statement, totalHits) -> { - for (var pageSize : pageSizes) { - testData.add(new Object[] { statement, totalHits, pageSize }); - } - }); + STATEMENT_TO_NUM_OF_PAGES.forEach( + (statement, totalHits) -> { + for (var pageSize : pageSizes) { + testData.add(new Object[] {statement, totalHits, pageSize}); + } + }); return testData; } /** - * Test compares non-paginated results with paginated results - * To ensure that the pushdowns return the same number of hits even - * with filter WHERE pushed down + * Test compares non-paginated results with paginated results To ensure that the pushdowns return + * 
the same number of hits even with filter WHERE pushed down */ @Test @SneakyThrows @@ -93,7 +102,10 @@ public void test_pagination_with_where() { int totalResultsCount = nonPaginatedResponse.getInt("total"); JSONArray rows = nonPaginatedResponse.getJSONArray("datarows"); JSONArray schema = nonPaginatedResponse.getJSONArray("schema"); - var testReportPrefix = String.format("query: %s; total hits: %d; page size: %d || ", sqlStatement, totalResultsCount, pageSize); + var testReportPrefix = + String.format( + "query: %s; total hits: %d; page size: %d || ", + sqlStatement, totalResultsCount, pageSize); assertEquals(totalHits.intValue(), totalResultsCount); var rowsPaged = new JSONArray(); @@ -101,7 +113,8 @@ public void test_pagination_with_where() { var responseCounter = 1; // make first request - with a cursor - JSONObject paginatedResponse = new JSONObject(executeFetchQuery(sqlStatement, pageSize, "jdbc")); + JSONObject paginatedResponse = + new JSONObject(executeFetchQuery(sqlStatement, pageSize, "jdbc")); this.logger.info(testReportPrefix + ""); do { var cursor = paginatedResponse.has("cursor") ? paginatedResponse.getString("cursor") : null; @@ -117,27 +130,34 @@ public void test_pagination_with_where() { if (cursor != null) { assertTrue( - testReportPrefix + "Cursor returned from legacy engine", - cursor.startsWith("n:")); + testReportPrefix + "Cursor returned from legacy engine", cursor.startsWith("n:")); paginatedResponse = executeCursorQuery(cursor); - this.logger.info(testReportPrefix - + String.format("response %d/%d", responseCounter++, (totalResultsCount / pageSize) + 1)); + this.logger.info( + testReportPrefix + + String.format( + "response %d/%d", responseCounter++, (totalResultsCount / pageSize) + 1)); } else { break; } } while (true); // last page expected results: - assertEquals(testReportPrefix + "Last page", - totalHits % pageSize, paginatedResponse.getInt("size")); - assertEquals(testReportPrefix + "Last page", - totalHits % pageSize, paginatedResponse.getJSONArray("datarows").length()); + assertEquals( + testReportPrefix + "Last page", totalHits % pageSize, paginatedResponse.getInt("size")); + assertEquals( + testReportPrefix + "Last page", + totalHits % pageSize, + paginatedResponse.getJSONArray("datarows").length()); // compare paginated and non-paginated counts - assertEquals(testReportPrefix + "Paged responses returned an unexpected total", - totalResultsCount, pagedSize); - assertEquals(testReportPrefix + "Paged responses returned an unexpected rows count", - rows.length(), rowsPaged.length()); + assertEquals( + testReportPrefix + "Paged responses returned an unexpected total", + totalResultsCount, + pagedSize); + assertEquals( + testReportPrefix + "Paged responses returned an unexpected rows count", + rows.length(), + rowsPaged.length()); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java index 224a1e95e4..49ef7c583e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java @@ -73,12 +73,16 @@ public void testCursorTimeout() throws IOException, InterruptedException { ResponseException exception = expectThrows(ResponseException.class, () -> executeCursorQuery(cursor)); response = new JSONObject(TestUtils.getResponseBody(exception.getResponse())); - assertEquals(response.getJSONObject("error").getString("reason"), + assertEquals( + 
response.getJSONObject("error").getString("reason"), "Error occurred in OpenSearch engine: all shards failed"); - assertTrue(response.getJSONObject("error").getString("details") - .contains("SearchContextMissingException[No search context found for id")); - assertEquals(response.getJSONObject("error").getString("type"), - "SearchPhaseExecutionException"); + assertTrue( + response + .getJSONObject("error") + .getString("details") + .contains("SearchContextMissingException[No search context found for id")); + assertEquals( + response.getJSONObject("error").getString("type"), "SearchPhaseExecutionException"); wipeAllClusterSettings(); } @@ -106,12 +110,16 @@ public void testCloseCursor() { ResponseException exception = expectThrows(ResponseException.class, () -> executeCursorQuery(cursor)); response = new JSONObject(TestUtils.getResponseBody(exception.getResponse())); - assertEquals(response.getJSONObject("error").getString("reason"), + assertEquals( + response.getJSONObject("error").getString("reason"), "Error occurred in OpenSearch engine: all shards failed"); - assertTrue(response.getJSONObject("error").getString("details") - .contains("SearchContextMissingException[No search context found for id")); - assertEquals(response.getJSONObject("error").getString("type"), - "SearchPhaseExecutionException"); + assertTrue( + response + .getJSONObject("error") + .getString("details") + .contains("SearchContextMissingException[No search context found for id")); + assertEquals( + response.getJSONObject("error").getString("type"), "SearchPhaseExecutionException"); } @Test @@ -134,7 +142,8 @@ public void testQueryWithOrderBy() { var cursor = response.getString("cursor"); do { assertTrue(cursor.isEmpty() || cursor.startsWith("n:")); - assertTrue("Paged response schema doesn't match to non-paged", + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); rowsReturnedAsc += response.getInt("size"); @@ -151,7 +160,7 @@ public void testQueryWithOrderBy() { cursor = ""; } - } while(!cursor.isEmpty()); + } while (!cursor.isEmpty()); query = String.format("SELECT * from %s ORDER BY num1 DESC", TEST_INDEX_CALCS); response = new JSONObject(executeFetchQuery(query, 7, "jdbc")); @@ -160,7 +169,8 @@ public void testQueryWithOrderBy() { cursor = response.getString("cursor"); do { assertTrue(cursor.isEmpty() || cursor.startsWith("n:")); - assertTrue("Paged response schema doesn't match to non-paged", + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); rowsReturnedDesc += response.getInt("size"); @@ -177,19 +187,22 @@ public void testQueryWithOrderBy() { cursor = ""; } - } while(!cursor.isEmpty()); + } while (!cursor.isEmpty()); - assertEquals("Paged responses return another row count that non-paged", - indexSize, rowsReturnedAsc); - assertEquals("Paged responses return another row count that non-paged", - indexSize, rowsReturnedDesc); - assertTrue("Paged accumulated result has other rows than non-paged", + assertEquals( + "Paged responses return another row count that non-paged", indexSize, rowsReturnedAsc); + assertEquals( + "Paged responses return another row count that non-paged", indexSize, rowsReturnedDesc); + assertTrue( + "Paged accumulated result has other rows than non-paged", rows.toList().containsAll(rowsPagedAsc.toList())); - assertTrue("Paged accumulated result has other rows than non-paged", + assertTrue( + "Paged accumulated result has other rows than non-paged", 
rows.toList().containsAll(rowsPagedDesc.toList())); for (int row = 0; row < indexSize; row++) { - assertTrue(String.format("Row %d: row order is incorrect", row), + assertTrue( + String.format("Row %d: row order is incorrect", row), rowsPagedAsc.getJSONArray(row).similar(rowsPagedDesc.getJSONArray(indexSize - row - 1))); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java index be208cd137..246cbfc4a0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java @@ -40,10 +40,11 @@ public void testFetchSizeLessThanMaxResultWindow() throws IOException { } while (response.has("cursor")); numRows += response.getJSONArray("datarows").length(); - var countRows = executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) - .getJSONArray("datarows") - .getJSONArray(0) - .get(0); + var countRows = + executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) + .getJSONArray("datarows") + .getJSONArray(0) + .get(0); assertEquals(countRows, numRows); } @@ -62,10 +63,11 @@ public void testQuerySizeLimitDoesNotEffectTotalRowsReturned() throws IOExceptio response = executeCursorQuery(cursor); } while (response.has("cursor")); numRows += response.getJSONArray("datarows").length(); - var countRows = executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) - .getJSONArray("datarows") - .getJSONArray(0) - .get(0); + var countRows = + executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) + .getJSONArray("datarows") + .getJSONArray(0) + .get(0); assertEquals(countRows, numRows); assertTrue(numRows > querySizeLimit); } @@ -74,12 +76,10 @@ public void testQuerySizeLimitDoesNotEffectTotalRowsReturned() throws IOExceptio public void testQuerySizeLimitDoesNotEffectPageSize() throws IOException { setQuerySizeLimit(3); setMaxResultWindow(TEST_INDEX_PHRASE, 4); - var response - = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 4); + var response = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 4); assertEquals(4, response.getInt("size")); - var response2 - = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 2); + var response2 = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 2); assertEquals(2, response2.getInt("size")); } @@ -87,11 +87,9 @@ public void testQuerySizeLimitDoesNotEffectPageSize() throws IOException { public void testFetchSizeLargerThanResultWindowFails() throws IOException { final int window = 2; setMaxResultWindow(TEST_INDEX_PHRASE, 2); - assertThrows(ResponseException.class, - () -> executeQueryTemplate("SELECT * FROM %s", - TEST_INDEX_PHRASE, window + 1)); + assertThrows( + ResponseException.class, + () -> executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, window + 1)); resetMaxResultWindow(TEST_INDEX_PHRASE); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java index d0587eab7f..6a9d40e7c3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java @@ -26,19 +26,29 @@ protected void init() throws Exception { @Test public void position_function_test() { String query = "SELECT firstname, position('a' IN firstname) FROM %s"; - JSONObject response = executeJdbcRequest(String.format(query, 
TestsConstants.TEST_INDEX_PEOPLE2)); + JSONObject response = + executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_PEOPLE2)); - verifySchema(response, schema("firstname", null, "keyword"), - schema("position('a' IN firstname)", null, "integer")); + verifySchema( + response, + schema("firstname", null, "keyword"), + schema("position('a' IN firstname)", null, "integer")); assertEquals(12, response.getInt("total")); - verifyDataRows(response, - rows("Daenerys", 2), rows("Hattie", 2), - rows("Nanette", 2), rows("Dale", 2), - rows("Elinor", 0), rows("Virginia", 8), - rows("Dillard", 5), rows("Mcgee", 0), - rows("Aurelia", 7), rows("Fulton", 0), - rows("Burton", 0), rows("Josie", 0)); + verifyDataRows( + response, + rows("Daenerys", 2), + rows("Hattie", 2), + rows("Nanette", 2), + rows("Dale", 2), + rows("Elinor", 0), + rows("Virginia", 8), + rows("Dillard", 5), + rows("Mcgee", 0), + rows("Aurelia", 7), + rows("Fulton", 0), + rows("Burton", 0), + rows("Josie", 0)); } @Test @@ -46,20 +56,31 @@ public void position_function_with_nulls_test() { String query = "SELECT str2, position('ee' IN str2) FROM %s"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_CALCS)); - verifySchema(response, schema("str2", null, "keyword"), - schema("position('ee' IN str2)", null, "integer")); + verifySchema( + response, + schema("str2", null, "keyword"), + schema("position('ee' IN str2)", null, "integer")); assertEquals(17, response.getInt("total")); - verifyDataRows(response, - rows("one", 0), rows("two", 0), - rows("three", 4), rows(null, null), - rows("five", 0), rows("six", 0), - rows(null, null), rows("eight", 0), - rows("nine", 0), rows("ten", 0), - rows("eleven", 0), rows("twelve", 0), - rows(null, null), rows("fourteen", 6), - rows("fifteen", 5), rows("sixteen", 5), - rows(null, null)); + verifyDataRows( + response, + rows("one", 0), + rows("two", 0), + rows("three", 4), + rows(null, null), + rows("five", 0), + rows("six", 0), + rows(null, null), + rows("eight", 0), + rows("nine", 0), + rows("ten", 0), + rows("eleven", 0), + rows("twelve", 0), + rows(null, null), + rows("fourteen", 6), + rows("fifteen", 5), + rows("sixteen", 5), + rows(null, null)); } @Test @@ -86,7 +107,8 @@ public void position_function_with_only_fields_as_args_test() { @Test public void position_function_with_function_as_arg_test() { - String query = "SELECT position(upper(str3) IN str1) FROM %s WHERE str1 LIKE 'BINDING SUPPLIES'"; + String query = + "SELECT position(upper(str3) IN str1) FROM %s WHERE str1 LIKE 'BINDING SUPPLIES'"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_CALCS)); verifySchema(response, schema("position(upper(str3) IN str1)", null, "integer")); @@ -110,17 +132,21 @@ public void position_function_in_where_clause_test() { public void position_function_with_null_args_test() { String query1 = "SELECT str2, position(null IN str2) FROM %s WHERE str2 IN ('one')"; String query2 = "SELECT str2, position(str2 IN null) FROM %s WHERE str2 IN ('one')"; - JSONObject response1 = executeJdbcRequest(String.format(query1, TestsConstants.TEST_INDEX_CALCS)); - JSONObject response2 = executeJdbcRequest(String.format(query2, TestsConstants.TEST_INDEX_CALCS)); - - verifySchema(response1, - schema("str2", null, "keyword"), - schema("position(null IN str2)", null, "integer")); + JSONObject response1 = + executeJdbcRequest(String.format(query1, TestsConstants.TEST_INDEX_CALCS)); + JSONObject response2 = + executeJdbcRequest(String.format(query2, 
TestsConstants.TEST_INDEX_CALCS)); + + verifySchema( + response1, + schema("str2", null, "keyword"), + schema("position(null IN str2)", null, "integer")); assertEquals(1, response1.getInt("total")); - verifySchema(response2, - schema("str2", null, "keyword"), - schema("position(str2 IN null)", null, "integer")); + verifySchema( + response2, + schema("str2", null, "keyword"), + schema("position(str2 IN null)", null, "integer")); assertEquals(1, response2.getInt("total")); verifyDataRows(response1, rows("one", null)); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java index 38ff32b0d7..8200f64b66 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import org.json.JSONObject; @@ -20,16 +19,21 @@ protected void init() throws Exception { @Test public void testPreparedStatement() { - JSONObject response = new JSONObject( - executeQuery(String.format("{\n" - + " \"query\": \"SELECT state FROM %s WHERE state = ? GROUP BY state\",\n" - + " \"parameters\": [\n" - + " {\n" - + " \"type\": \"string\",\n" - + " \"value\": \"WA\"\n" - + " }\n" - + " ]\n" - + "}", TestsConstants.TEST_INDEX_ACCOUNT), "jdbc")); + JSONObject response = + new JSONObject( + executeQuery( + String.format( + "{\n" + + " \"query\": \"SELECT state FROM %s WHERE state = ? GROUP BY state\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"WA\"\n" + + " }\n" + + " ]\n" + + "}", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc")); assertFalse(response.getJSONArray("datarows").isEmpty()); } @@ -39,5 +43,4 @@ protected String makeRequest(String query) { // Avoid wrap with "query" again return query; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java index e61593eb21..fd8066ea41 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java @@ -13,72 +13,80 @@ import org.opensearch.sql.legacy.SQLIntegTestCase; public class QueryIT extends SQLIntegTestCase { - @Override - public void init() throws IOException { - loadIndex(Index.BEER); - } + @Override + public void init() throws IOException { + loadIndex(Index.BEER); + } - @Test - public void all_fields_test() throws IOException { - String query = "SELECT * FROM " - + TEST_INDEX_BEER + " WHERE query('*:taste')"; - JSONObject result = executeJdbcRequest(query); - assertEquals(16, result.getInt("total")); - } + @Test + public void all_fields_test() throws IOException { + String query = "SELECT * FROM " + TEST_INDEX_BEER + " WHERE query('*:taste')"; + JSONObject result = executeJdbcRequest(query); + assertEquals(16, result.getInt("total")); + } - @Test - public void mandatory_params_test() throws IOException { - String query = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query('Tags:taste OR Body:taste')"; - JSONObject result = executeJdbcRequest(query); - assertEquals(16, result.getInt("total")); - } + @Test + public void mandatory_params_test() throws IOException { + String query = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:taste OR Body:taste')"; + JSONObject result = executeJdbcRequest(query); + assertEquals(16, result.getInt("total")); + } - @Test - 
public void all_params_test() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query('Tags:taste', escape=false," - + "allow_leading_wildcard=true, enable_position_increments=true," - + "fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states = 10000," - + "analyzer='standard', analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77," - + "quote_analyzer='standard', phrase_slop=0, rewrite='constant_score', type='best_fields'," - + "tie_breaker=0.3, time_zone='Canada/Pacific', default_operator='or'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7);"; - JSONObject result = executeJdbcRequest(query); - assertEquals(8, result.getInt("total")); - } + @Test + public void all_params_test() throws IOException { + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query('Tags:taste'," + + " escape=false,allow_leading_wildcard=true," + + " enable_position_increments=true," + + " fuzziness= 1," + + " fuzzy_rewrite='constant_score'," + + " max_determinized_states = 10000," + + " analyzer='standard'," + + " analyze_wildcard = false," + + " quote_field_suffix = '.exact'," + + " auto_generate_synonyms_phrase_query=true," + + " boost = 0.77," + + " quote_analyzer='standard'," + + " phrase_slop=0," + + " rewrite='constant_score'," + + " type='best_fields'," + + " tie_breaker=0.3," + + " time_zone='Canada/Pacific'," + + " default_operator='or'," + + " fuzzy_transpositions = false," + + " lenient = true," + + " fuzzy_max_expansions = 25," + + " minimum_should_match = '2<-25% 9<-3'," + + " fuzzy_prefix_length = 7);"; + JSONObject result = executeJdbcRequest(query); + assertEquals(8, result.getInt("total")); + } - @Test - public void wildcard_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; - JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query('*:taste')"; - JSONObject result2 = executeJdbcRequest(query2); - assertNotEquals(result2.getInt("total"), result1.getInt("total")); + @Test + public void wildcard_test() throws IOException { + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; + JSONObject result1 = executeJdbcRequest(query1); + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('*:taste')"; + JSONObject result2 = executeJdbcRequest(query2); + assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query('Tags:tas*');"; - JSONObject result3 = executeJdbcRequest(query3); - assertEquals(8, result3.getInt("total")); + String query3 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:tas*');"; + JSONObject result3 = executeJdbcRequest(query3); + assertEquals(8, result3.getInt("total")); - String query4 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query('Tags:tas?e');"; - JSONObject result4 = executeJdbcRequest(query3); - assertEquals(8, result4.getInt("total")); - } + String query4 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:tas?e');"; + JSONObject result4 = executeJdbcRequest(query3); + assertEquals(8, result4.getInt("total")); + } - @Test - public void query_string_and_query_return_the_same_results_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE 
query('Tags:taste')"; - JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string(['Tags'],'taste')"; - JSONObject result2 = executeJdbcRequest(query2); - assertEquals(result2.getInt("total"), result1.getInt("total")); - } + @Test + public void query_string_and_query_return_the_same_results_test() throws IOException { + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; + JSONObject result1 = executeJdbcRequest(query1); + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['Tags'],'taste')"; + JSONObject result2 = executeJdbcRequest(query2); + assertEquals(result2.getInt("total"), result1.getInt("total")); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java index 348889a0cc..3d4e08b4cd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java @@ -20,48 +20,65 @@ public void init() throws IOException { @Test public void all_fields_test() throws IOException { - String query = "SELECT * FROM " - + TEST_INDEX_BEER + " WHERE query_string(['*'], 'taste')"; + String query = "SELECT * FROM " + TEST_INDEX_BEER + " WHERE query_string(['*'], 'taste')"; JSONObject result = executeJdbcRequest(query); assertEquals(16, result.getInt("total")); } @Test public void mandatory_params_test() throws IOException { - String query = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; JSONObject result = executeJdbcRequest(query); assertEquals(16, result.getInt("total")); } @Test public void all_params_test() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query_string(['Body', Tags, Title], 'taste beer', escape=false," - + "allow_leading_wildcard=true, enable_position_increments=true," - + "fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states = 10000," - + "analyzer='english', analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77," - + "quote_analyzer='standard', phrase_slop=0, rewrite='constant_score', type='best_fields'," - + "tie_breaker=0.3, time_zone='Canada/Pacific', default_operator='or'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7);"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string(['Body', Tags, Title]," + + " 'taste beer'," + + " escape=false," + + " allow_leading_wildcard=true," + + " enable_position_increments=true," + + " fuzziness= 1," + + " fuzzy_rewrite='constant_score'," + + " max_determinized_states = 10000," + + " analyzer='english'," + + " analyze_wildcard = false," + + " quote_field_suffix = '.exact'," + + " auto_generate_synonyms_phrase_query=true," + + " boost = 0.77," + + " quote_analyzer='standard'," + + " phrase_slop=0," + + " rewrite='constant_score'," + + " type='best_fields'," + + " tie_breaker=0.3," + + " time_zone='Canada/Pacific'," + + " default_operator='or'," + + " fuzzy_transpositions = false," + + " lenient = true," + + " fuzzy_max_expansions = 25," + + " minimum_should_match = '2<-25% 9<-3'," + + " 
fuzzy_prefix_length = 7);"; JSONObject result = executeJdbcRequest(query); assertEquals(49, result.getInt("total")); } @Test public void wildcard_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string(['Tags'], 'taste')"; + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['Tags'], 'taste')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string(['T*'], 'taste')"; + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['T*'], 'taste')"; JSONObject result2 = executeJdbcRequest(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query_string(['*Date'], '2014-01-22');"; + String query3 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['*Date'], '2014-01-22');"; JSONObject result3 = executeJdbcRequest(query3); assertEquals(10, result3.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java index 5a16cd3f64..e42b68631f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.is; @@ -25,28 +24,29 @@ import org.opensearch.sql.legacy.SQLIntegTestCase; /** - * The query validation IT only covers test for error cases that not doable in comparison test. - * For all other tests, comparison test should be favored over manual written test like this. + * The query validation IT only covers test for error cases that not doable in comparison test. For + * all other tests, comparison test should be favored over manual written test like this. */ public class QueryValidationIT extends SQLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Override protected void init() throws Exception { loadIndex(Index.ACCOUNT); } - @Ignore("Will add this validation in analyzer later. This test should be enabled once " + - "https://github.com/opensearch-project/sql/issues/910 has been resolved") + @Ignore( + "Will add this validation in analyzer later. 
This test should be enabled once " + + "https://github.com/opensearch-project/sql/issues/910 has been resolved") @Test public void testNonAggregatedSelectColumnMissingInGroupByClause() throws IOException { expectResponseException() .hasStatusCode(BAD_REQUEST) .hasErrorType("SemanticCheckException") - .containsMessage("Expression [state] that contains non-aggregated column " - + "is not present in group by clause") + .containsMessage( + "Expression [state] that contains non-aggregated column " + + "is not present in group by clause") .whenExecute("SELECT state FROM opensearch-sql_test_index_account GROUP BY age"); } @@ -55,8 +55,9 @@ public void testNonAggregatedSelectColumnPresentWithoutGroupByClause() throws IO expectResponseException() .hasStatusCode(BAD_REQUEST) .hasErrorType("SemanticCheckException") - .containsMessage("Explicit GROUP BY clause is required because expression [state] " - + "contains non-aggregated column") + .containsMessage( + "Explicit GROUP BY clause is required because expression [state] " + + "contains non-aggregated column") .whenExecute("SELECT state, AVG(age) FROM opensearch-sql_test_index_account"); } @@ -87,8 +88,7 @@ public ResponseExceptionAssertion expectResponseException() { /** * Response exception assertion helper to assert property value in OpenSearch ResponseException - * and Response inside. This serves as syntax sugar to improve the readability of test - * code. + * and Response inside. This serves as syntax sugar to improve the readability of test code. */ private static class ResponseExceptionAssertion { private final ExpectedException exceptionRule; @@ -100,9 +100,12 @@ private ResponseExceptionAssertion(ExpectedException exceptionRule) { } ResponseExceptionAssertion hasStatusCode(RestStatus code) { - exceptionRule.expect(featureValueOf("statusCode", is(code), - (Function) e -> - RestStatus.fromCode(e.getResponse().getStatusLine().getStatusCode()))); + exceptionRule.expect( + featureValueOf( + "statusCode", + is(code), + (Function) + e -> RestStatus.fromCode(e.getResponse().getStatusLine().getStatusCode()))); return this; } @@ -133,5 +136,4 @@ private static void execute(String query) throws IOException { client().performRequest(request); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java index eb693a4718..9d2861ce98 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_RAW_SANITIZE; @@ -26,21 +25,27 @@ public void init() throws IOException { @Test public void rawFormatWithPipeFieldTest() { - String result = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE), "raw"); - assertEquals(StringUtils.format( - "firstname|lastname%n" - + "+Amber JOHnny|Duke Willmington+%n" - + "-Hattie|Bond-%n" - + "=Nanette|Bates=%n" - + "@Dale|Adams@%n" - + "@Elinor|\"Ratliff|||\"%n"), + String result = + executeQuery( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE), + "raw"); + assertEquals( + StringUtils.format( + "firstname|lastname%n" + + "+Amber JOHnny|Duke Willmington+%n" + + "-Hattie|Bond-%n" + + "=Nanette|Bates=%n" + + "@Dale|Adams@%n" + + "@Elinor|\"Ratliff|||\"%n"), result); } @Test public void 
contentHeaderTest() throws IOException { - String query = String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE); + String query = + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE); String requestBody = makeRequest(query); Request sqlRequest = new Request("POST", "/_plugins/_sql?format=raw"); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java index 26fe735f12..755493c167 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java @@ -24,11 +24,15 @@ public void init() throws IOException { */ @Test public void verify_flags_in_simple_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertNotEquals(result2.getInt("total"), result1.getInt("total")); @@ -44,11 +48,11 @@ public void verify_flags_in_simple_query_string() throws IOException { */ @Test public void verify_escape_in_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=true);"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=true);"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=false);"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=false);"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(0, result1.getInt("total")); assertEquals(8, result2.getInt("total")); @@ -61,11 +65,15 @@ public void verify_escape_in_query_string() throws IOException { */ @Test public void verify_default_operator_in_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string([Title], 'beer taste', default_operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string([Title], 'beer taste', default_operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -73,11 +81,15 @@ public void verify_default_operator_in_query_string() throws IOException { @Test public void verify_default_operator_in_simple_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE 
simple_query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -85,11 +97,15 @@ public void verify_default_operator_in_simple_query_string() throws IOException @Test public void verify_default_operator_in_multi_match() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE multi_match([Title], 'beer taste', operator='OR')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match([Title], 'beer taste', operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE multi_match([Title], 'beer taste', operator='AND')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match([Title], 'beer taste', operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -97,11 +113,11 @@ public void verify_default_operator_in_multi_match() throws IOException { @Test public void verify_operator_in_match() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='OR')"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='AND')"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java index 30f23547ec..6056a1c416 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import com.google.common.io.Resources; @@ -14,14 +13,12 @@ import java.util.function.Function; import org.junit.Test; -/** - * SQL integration test automated by comparison test framework. - */ +/** SQL integration test automated by comparison test framework. 
*/ public class SQLCorrectnessIT extends CorrectnessTestBase { private static final String ROOT_DIR = "correctness/"; - private static final String[] EXPR_TEST_DIR = { "expressions" }; - private static final String[] QUERY_TEST_DIR = { "queries", "bugfixes" }; + private static final String[] EXPR_TEST_DIR = {"expressions"}; + private static final String[] QUERY_TEST_DIR = {"queries", "bugfixes"}; @Override protected void init() throws Exception { @@ -35,32 +32,30 @@ public void runAllTests() throws Exception { } /** - * Verify queries in files in directories with a converter to preprocess query. - * For example, for expressions it is converted to a SELECT clause before testing. + * Verify queries in files in directories with a converter to preprocess query. For example, for + * expressions it is converted to a SELECT clause before testing. */ @SuppressWarnings("UnstableApiUsage") private void verifyQueries(String[] dirs, Function converter) throws Exception { for (String dir : dirs) { Path dirPath = Paths.get(Resources.getResource(ROOT_DIR + dir).toURI()); Files.walk(dirPath) - .filter(Files::isRegularFile) - .forEach(file -> verifyQueries(file, converter)); + .filter(Files::isRegularFile) + .forEach(file -> verifyQueries(file, converter)); } } - /** - * Comment start with # - */ + /** Comment start with # */ private void verifyQueries(Path file, Function converter) { try { - String[] queries = Files.lines(file) - .filter(line -> !line.startsWith("#")) - .map(converter) - .toArray(String[]::new); + String[] queries = + Files.lines(file) + .filter(line -> !line.startsWith("#")) + .map(converter) + .toArray(String[]::new); verify(queries); } catch (IOException e) { throw new IllegalStateException("Failed to read file: " + file, e); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java index e824b1ab2b..6616746d99 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java @@ -26,6 +26,9 @@ protected void init() throws Exception { } /** + * + * + *
    * "query" : {
    *   "from": 0,
    *   "size": 3,
@@ -83,18 +86,25 @@ protected void init() throws Exception {
    *   ],
    *   "track_scores": true
    * }
+   * </pre>
+ * * @throws IOException */ @Test public void scoreQueryExplainTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where score(matchQuery(address, 'Douglass'), 100) " + - "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Douglass\\\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s " + + "where score(matchQuery(address, 'Douglass'), 100) " + + "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Douglass\\\"")); Assert.assertThat(result, containsString("\\\"boost\\\":100.0")); - Assert.assertThat(result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Hall\\\"")); + Assert.assertThat( + result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Hall\\\"")); Assert.assertThat(result, containsString("\\\"boost\\\":0.5")); Assert.assertThat(result, containsString("\\\"sort\\\":[{\\\"_score\\\"")); Assert.assertThat(result, containsString("\\\"track_scores\\\":true")); @@ -102,26 +112,32 @@ public void scoreQueryExplainTest() throws IOException { @Test public void scoreQueryTest() throws IOException { - final JSONObject result = new JSONObject(executeQuery(String.format(Locale.ROOT, - "select address, _score from %s " + - "where score(matchQuery(address, 'Douglass'), 100) " + - "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc")); - verifySchema(result, - schema("address", null, "text"), - schema("_score", null, "float")); - verifyDataRows(result, - rows("154 Douglass Street", 650.1515), - rows("565 Hall Street", 3.2507575)); + final JSONObject result = + new JSONObject( + executeQuery( + String.format( + Locale.ROOT, + "select address, _score from %s " + + "where score(matchQuery(address, 'Douglass'), 100) " + + "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc")); + verifySchema(result, schema("address", null, "text"), schema("_score", null, "float")); + verifyDataRows( + result, rows("154 Douglass Street", 650.1515), rows("565 Hall Street", 3.2507575)); } @Test public void scoreQueryDefaultBoostExplainTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where score(matchQuery(address, 'Lane')) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Lane\\\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s " + + "where score(matchQuery(address, 'Lane')) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Lane\\\"")); Assert.assertThat(result, containsString("\\\"boost\\\":1.0")); Assert.assertThat(result, containsString("\\\"sort\\\":[{\\\"_score\\\"")); Assert.assertThat(result, containsString("\\\"track_scores\\\":true")); @@ -129,13 +145,16 @@ public void scoreQueryDefaultBoostExplainTest() throws IOException { @Test public void 
scoreQueryDefaultBoostQueryTest() throws IOException { - final JSONObject result = new JSONObject(executeQuery(String.format(Locale.ROOT, - "select address, _score from %s " + - "where score(matchQuery(address, 'Powell')) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc")); - verifySchema(result, - schema("address", null, "text"), - schema("_score", null, "float")); + final JSONObject result = + new JSONObject( + executeQuery( + String.format( + Locale.ROOT, + "select address, _score from %s " + + "where score(matchQuery(address, 'Powell')) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc")); + verifySchema(result, schema("address", null, "text"), schema("_score", null, "float")); verifyDataRows(result, rows("305 Powell Street", 6.501515)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java index 44f4e5ca9c..8742dedbc7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java @@ -31,43 +31,60 @@ public void init() throws IOException { @Test public void test_mandatory_params() throws IOException { - String query = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(16, result.getInt("total")); } @Test public void test_all_params() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE simple_query_string(['Body', Tags, Title], 'taste beer', default_operator='or'," - + "analyzer=english, analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, flags='PREFIX'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7);"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string(['Body', Tags, Title], " + + "'taste beer'," + + " default_operator='or'," + + " analyzer=english, " + + " analyze_wildcard = false," + + " quote_field_suffix = '.exact'," + + " auto_generate_synonyms_phrase_query=true," + + " boost = 0.77," + + " flags='PREFIX'," + + " fuzzy_transpositions = false," + + " lenient = true," + + " fuzzy_max_expansions = 25," + + " minimum_should_match = '2<-25% 9<-3'," + + " fuzzy_prefix_length = 7);"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(49, result.getInt("total")); } @Test public void verify_wildcard_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string(['Tags'], 'taste')"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE simple_query_string(['Tags'], 'taste')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string(['T*'], 'taste')"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE simple_query_string(['T*'], 'taste')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE 
simple_query_string(['*Date'], '2014-01-22');"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string(['*Date'], '2014-01-22');"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(10, result.getInt("total")); } @Test public void contentHeaderTest() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + " WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; String requestBody = makeRequest(query); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java index 4738d233bf..e884734c96 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java @@ -66,17 +66,19 @@ public class StandalonePaginationIT extends SQLIntegTestCase { public void init() { RestHighLevelClient restClient = new InternalRestHighLevelClient(client()); client = new OpenSearchRestClient(restClient); - DataSourceService dataSourceService = new DataSourceServiceImpl( - new ImmutableSet.Builder() - .add(new OpenSearchDataSourceFactory(client, defaultSettings())) - .build(), - getDataSourceMetadataStorage(), - getDataSourceUserRoleHelper() - ); + DataSourceService dataSourceService = + new DataSourceServiceImpl( + new ImmutableSet.Builder() + .add(new OpenSearchDataSourceFactory(client, defaultSettings())) + .build(), + getDataSourceMetadataStorage(), + getDataSourceUserRoleHelper()); dataSourceService.createDataSource(defaultOpenSearchDataSourceMetadata()); ModulesBuilder modules = new ModulesBuilder(); - modules.add(new StandaloneModule(new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); + modules.add( + new StandaloneModule( + new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); Injector injector = modules.createInjector(); queryService = injector.getInstance(QueryService.class); @@ -85,10 +87,9 @@ public void init() { @Test public void test_pagination_whitebox() throws IOException { - class TestResponder - implements ResponseListener { - @Getter - Cursor cursor = Cursor.None; + class TestResponder implements ResponseListener { + @Getter Cursor cursor = Cursor.None; + @Override public void onResponse(ExecutionEngine.QueryResponse response) { cursor = response.getCursor(); @@ -113,13 +114,16 @@ public void onFailure(Exception e) { // act 1, asserts in firstResponder var t = new OpenSearchIndex(client, defaultSettings(), "test"); - LogicalPlan p = new LogicalPaginate(1, List.of( - new LogicalProject( - new LogicalRelation("test", t), List.of( - DSL.named("name", DSL.ref("name", ExprCoreType.STRING)), - DSL.named("age", DSL.ref("age", ExprCoreType.LONG))), - List.of() - ))); + LogicalPlan p = + new LogicalPaginate( + 1, + List.of( + new LogicalProject( + new LogicalRelation("test", t), + List.of( + DSL.named("name", DSL.ref("name", ExprCoreType.STRING)), + DSL.named("age", DSL.ref("age", ExprCoreType.LONG))), + List.of()))); var firstResponder = new TestResponder(); queryService.executePlan(p, PlanContext.emptyPlanContext(), firstResponder); @@ -139,24 +143,30 @@ public void test_explain_not_supported() { // Request should be rejected before index names are resolved request.setJsonEntity("{ \"query\": \"select * from something\", \"fetch_size\": 10 }"); var exception = 
assertThrows(ResponseException.class, () -> client().performRequest(request)); - var response = new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); - assertEquals("`explain` feature for paginated requests is not implemented yet.", + var response = + new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); + assertEquals( + "`explain` feature for paginated requests is not implemented yet.", response.getJSONObject("error").getString("details")); // Request should be rejected before cursor parsed request.setJsonEntity("{ \"cursor\" : \"n:0000\" }"); exception = assertThrows(ResponseException.class, () -> client().performRequest(request)); - response = new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); - assertEquals("Explain of a paged query continuation is not supported. Use `explain` for the initial query request.", + response = + new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); + assertEquals( + "Explain of a paged query continuation is not supported. Use `explain` for the initial" + + " query request.", response.getJSONObject("error").getString("details")); } private Settings defaultSettings() { return new Settings() { - private final Map defaultSettings = new ImmutableMap.Builder() - .put(Key.QUERY_SIZE_LIMIT, 200) - .put(Key.SQL_CURSOR_KEEP_ALIVE, TimeValue.timeValueMinutes(1)) - .build(); + private final Map defaultSettings = + new ImmutableMap.Builder() + .put(Key.QUERY_SIZE_LIMIT, 200) + .put(Key.SQL_CURSOR_KEEP_ALIVE, TimeValue.timeValueMinutes(1)) + .build(); @Override public T getSettingValue(Key key) { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java index e54000f80d..2b6f9476c7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -16,51 +15,39 @@ import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; - - public class StringLiteralIT extends SQLIntegTestCase { @Test public void testStringHelloSingleQuote() throws IOException { - JSONObject result = - executeJdbcRequest("select 'Hello'"); - verifySchema(result, - schema("'Hello'", null, "keyword")); + JSONObject result = executeJdbcRequest("select 'Hello'"); + verifySchema(result, schema("'Hello'", null, "keyword")); verifyDataRows(result, rows("Hello")); } @Test public void testStringHelloDoubleQuote() throws IOException { - JSONObject result = - executeJdbcRequest("select \\\"Hello\\\""); - verifySchema(result, - schema("\"Hello\"", null, "keyword")); + JSONObject result = executeJdbcRequest("select \\\"Hello\\\""); + verifySchema(result, schema("\"Hello\"", null, "keyword")); verifyDataRows(result, rows("Hello")); } @Test public void testImStringDoubleDoubleQuoteEscape() throws IOException { - JSONObject result = - executeJdbcRequest("select \\\"I\\\"\\\"m\\\""); - verifySchema(result, - schema("\"I\"\"m\"", null, "keyword")); + JSONObject result = executeJdbcRequest("select \\\"I\\\"\\\"m\\\""); + verifySchema(result, schema("\"I\"\"m\"", null, "keyword")); verifyDataRows(result, rows("I\"m")); } @Test public void testImStringDoubleSingleQuoteEscape() throws IOException { - JSONObject result = - 
executeJdbcRequest("select 'I''m'"); - verifySchema(result, - schema("'I''m'", null, "keyword")); + JSONObject result = executeJdbcRequest("select 'I''m'"); + verifySchema(result, schema("'I''m'", null, "keyword")); verifyDataRows(result, rows("I'm")); } @Test public void testImStringEscapedSingleQuote() throws IOException { - JSONObject result = - executeJdbcRequest("select 'I\\\\'m'"); - verifySchema(result, - schema("'I\\'m'", null, "keyword")); + JSONObject result = executeJdbcRequest("select 'I\\\\'m'"); + verifySchema(result, schema("'I\\'m'", null, "keyword")); verifyDataRows(result, rows("I'm")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java index 584cdd05dd..4b39e2925c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java @@ -24,37 +24,47 @@ protected void init() throws Exception { @Test public void typeof_sql_types() { - JSONObject response = executeJdbcRequest("SELECT typeof('pewpew'), typeof(NULL), typeof(1.0)," - + "typeof(12345), typeof(1234567891011), typeof(INTERVAL 2 DAY);"); - verifyDataRows(response, - rows("KEYWORD", "UNDEFINED", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); - - response = executeJdbcRequest("SELECT" - + " typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," - + " typeof(CAST('09:07:00' AS TIME))," - + " typeof(CAST('1961-04-12' AS DATE))," - + " typeof(DATETIME('1961-04-12 09:07:00'))"); - verifyDataRows(response, - rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); + JSONObject response = + executeJdbcRequest( + "SELECT typeof('pewpew'), typeof(NULL), typeof(1.0)," + + "typeof(12345), typeof(1234567891011), typeof(INTERVAL 2 DAY);"); + verifyDataRows(response, rows("KEYWORD", "UNDEFINED", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); + + response = + executeJdbcRequest( + "SELECT" + + " typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," + + " typeof(CAST('09:07:00' AS TIME))," + + " typeof(CAST('1961-04-12' AS DATE))," + + " typeof(DATETIME('1961-04-12 09:07:00'))"); + verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); } @Test public void typeof_opensearch_types() { - JSONObject response = executeJdbcRequest(String.format("SELECT typeof(double_number)," - + "typeof(long_number), typeof(integer_number), typeof(byte_number), typeof(short_number)," - + "typeof(float_number), typeof(half_float_number), typeof(scaled_float_number)" - + " from %s;", TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, - rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); - - response = executeJdbcRequest(String.format("SELECT typeof(text_value)," - + "typeof(date_value), typeof(boolean_value), typeof(object_value), typeof(keyword_value)," - + "typeof(ip_value), typeof(binary_value), typeof(geo_point_value)" - // TODO activate this test once `ARRAY` type supported, see ExpressionAnalyzer::isTypeNotSupported - //+ ", typeof(nested_value)" - + " from %s;", TEST_INDEX_DATATYPE_NONNUMERIC)); - verifyDataRows(response, - rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", - "IP", "BINARY", "GEO_POINT")); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT typeof(double_number),typeof(long_number), typeof(integer_number)," + + " typeof(byte_number), typeof(short_number),typeof(float_number)," + + " typeof(half_float_number), typeof(scaled_float_number) from %s;", + TEST_INDEX_DATATYPE_NUMERIC)); 
+ verifyDataRows( + response, rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); + + response = + executeJdbcRequest( + String.format( + "SELECT typeof(text_value),typeof(date_value), typeof(boolean_value)," + + " typeof(object_value), typeof(keyword_value),typeof(ip_value)," + + " typeof(binary_value), typeof(geo_point_value)" + // TODO activate this test once `ARRAY` type supported, see + // ExpressionAnalyzer::isTypeNotSupported + // + ", typeof(nested_value)" + + " from %s;", + TEST_INDEX_DATATYPE_NONNUMERIC)); + verifyDataRows( + response, + rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", "IP", "BINARY", "GEO_POINT")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java index 94677354e4..314132fed0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; @@ -43,8 +42,7 @@ void verifyQuery(String query, String type, Integer output) throws IOException { void verifyQueryWithNullOutput(String query, String type) throws IOException { JSONObject result = executeQuery("select 'test null'," + query); - verifySchema(result, schema(query, null, type), - schema("'test null'", null, type)); + verifySchema(result, schema(query, null, type), schema("'test null'", null, type)); verifyDataRows(result, rows("test null", null)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java index 030c07c5fa..8123f887f2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; @@ -25,11 +24,17 @@ protected void init() throws Exception { public void test_wildcard_query_asterisk_function() throws IOException { String expected = "test wildcard"; - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 't*') LIMIT 1"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 't*') LIMIT 1"; JSONObject result1 = executeJdbcRequest(query1); verifyDataRows(result1, rows(expected)); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcardquery(KeywordBody, 't*') LIMIT 1"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcardquery(KeywordBody, 't*') LIMIT 1"; JSONObject result2 = executeJdbcRequest(query2); verifyDataRows(result2, rows(expected)); } @@ -38,11 +43,17 @@ public void test_wildcard_query_asterisk_function() throws IOException { public void test_wildcard_query_question_mark_function() throws IOException { String expected = "test wildcard"; - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test wild??rd')"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test wild??rd')"; JSONObject result1 = executeJdbcRequest(query1); verifyDataRows(result1, rows(expected)); - 
String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcardquery(KeywordBody, 'test wild??rd')"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcardquery(KeywordBody, 'test wild??rd')"; JSONObject result2 = executeJdbcRequest(query2); verifyDataRows(result2, rows(expected)); } @@ -50,11 +61,17 @@ public void test_wildcard_query_question_mark_function() throws IOException { // SQL uses ? as a wildcard which is converted to * in WildcardQuery.java @Test public void test_wildcard_query_sql_wildcard_percent_conversion() throws IOException { - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test%')"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test%')"; JSONObject result1 = executeJdbcRequest(query1); assertEquals(8, result1.getInt("total")); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test*')"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test*')"; JSONObject result2 = executeJdbcRequest(query2); assertEquals(result1.getInt("total"), result2.getInt("total")); } @@ -62,27 +79,41 @@ public void test_wildcard_query_sql_wildcard_percent_conversion() throws IOExcep // SQL uses _ as a wildcard which is converted to ? in WildcardQuery.java @Test public void test_wildcard_query_sql_wildcard_underscore_conversion() throws IOException { - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test wild_ard*')"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test wild_ard*')"; JSONObject result1 = executeJdbcRequest(query1); assertEquals(7, result1.getInt("total")); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test wild?ard*')"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test wild?ard*')"; JSONObject result2 = executeJdbcRequest(query2); assertEquals(result1.getInt("total"), result2.getInt("total")); } @Test public void test_escaping_wildcard_percent_in_the_beginning_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '\\\\%*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '\\\\%*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("%test wildcard in the beginning of the text")); } @Test public void test_escaping_wildcard_percent_in_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\%%')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\%%')"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, rows("test wildcard in % the middle of the text"), + verifyDataRows( + result, + rows("test wildcard in % the middle of the text"), rows("test wildcard %% beside each other"), rows("test wildcard in the end of the text%"), rows("%test wildcard in the beginning of the text")); @@ -90,30 +121,44 @@ public void test_escaping_wildcard_percent_in_text() throws IOException { @Test public void 
test_escaping_wildcard_percent_in_the_end_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\%')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\%')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test wildcard in the end of the text%")); } @Test public void test_double_escaped_wildcard_percent() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\%\\\\%*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\%\\\\%*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test wildcard %% beside each other")); } @Test public void test_escaping_wildcard_underscore_in_the_beginning_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '\\\\_*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '\\\\_*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("_test wildcard in the beginning of the text")); } @Test public void test_escaping_wildcard_underscore_in_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\_*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\_*')"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, rows("test wildcard in _ the middle of the text"), + verifyDataRows( + result, + rows("test wildcard in _ the middle of the text"), rows("test wildcard __ beside each other"), rows("test wildcard in the end of the text_"), rows("_test wildcard in the beginning of the text"), @@ -122,60 +167,77 @@ public void test_escaping_wildcard_underscore_in_text() throws IOException { @Test public void test_escaping_wildcard_underscore_in_the_end_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\_')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\_')"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("test wildcard in the end of the text_"), - rows("test backslash wildcard \\_")); + verifyDataRows( + result, rows("test wildcard in the end of the text_"), rows("test backslash wildcard \\_")); } @Test public void test_double_escaped_wildcard_underscore() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\_\\\\_*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\_\\\\_*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test wildcard __ beside each other")); } @Test public void test_backslash_wildcard() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\_')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\_')"; JSONObject result = 
executeJdbcRequest(query); verifyDataRows(result, rows("test backslash wildcard \\_")); } @Test public void all_params_test() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD - + " WHERE wildcard_query(KeywordBody, 'test*', boost = 0.9," - + " case_insensitive=true, rewrite='constant_score')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test*', boost = 0.9," + + " case_insensitive=true, rewrite='constant_score')"; JSONObject result = executeJdbcRequest(query); assertEquals(8, result.getInt("total")); } @Test public void test_wildcard_query_on_text_field_with_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test*')"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test*')"; JSONObject result = executeJdbcRequest(query); assertEquals(9, result.getInt("total")); } @Test public void test_wildcard_query_on_text_keyword_field_with_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextKeywordBody, 'test*')"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextKeywordBody, 'test*')"; JSONObject result = executeJdbcRequest(query); assertEquals(9, result.getInt("total")); } @Test public void test_wildcard_query_on_text_field_with_greater_than_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test wild*')"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test wild*')"; JSONObject result = executeJdbcRequest(query); assertEquals(0, result.getInt("total")); } @Test - public void test_wildcard_query_on_text_keyword_field_with_greater_than_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextKeywordBody, 'test wild*')"; + public void test_wildcard_query_on_text_keyword_field_with_greater_than_one_word() + throws IOException { + String query = + "SELECT * FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(TextKeywordBody, 'test wild*')"; JSONObject result = executeJdbcRequest(query); assertEquals(0, result.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java index b586125af3..86257e6a22 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -25,11 +24,16 @@ protected void init() throws Exception { @Test public void testOrderByNullFirst() { - JSONObject response = new JSONObject( - executeQuery("SELECT age, ROW_NUMBER() OVER(ORDER BY age DESC NULLS FIRST) " - + "FROM " + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc")); + JSONObject response = + new JSONObject( + executeQuery( + "SELECT age, ROW_NUMBER() OVER(ORDER BY age DESC NULLS FIRST) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + "jdbc")); - verifyDataRows(response, + verifyDataRows( + response, rows(null, 1), rows(36, 2), rows(36, 3), @@ -41,11 +45,16 @@ public void testOrderByNullFirst() { @Test public void testOrderByNullLast() { - JSONObject 
response = new JSONObject( - executeQuery("SELECT age, ROW_NUMBER() OVER(ORDER BY age NULLS LAST) " - + "FROM " + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc")); + JSONObject response = + new JSONObject( + executeQuery( + "SELECT age, ROW_NUMBER() OVER(ORDER BY age NULLS LAST) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + "jdbc")); - verifyDataRows(response, + verifyDataRows( + response, rows(28, 1), rows(32, 2), rows(33, 3), @@ -57,10 +66,15 @@ public void testOrderByNullLast() { @Test public void testDistinctCountOverNull() { - JSONObject response = new JSONObject(executeQuery( - "SELECT lastname, COUNT(DISTINCT gender) OVER() " - + "FROM " + TestsConstants.TEST_INDEX_BANK, "jdbc")); - verifyDataRows(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT lastname, COUNT(DISTINCT gender) OVER() " + + "FROM " + + TestsConstants.TEST_INDEX_BANK, + "jdbc")); + verifyDataRows( + response, rows("Duke Willmington", 2), rows("Bond", 2), rows("Bates", 2), @@ -72,10 +86,15 @@ public void testDistinctCountOverNull() { @Test public void testDistinctCountOver() { - JSONObject response = new JSONObject(executeQuery( - "SELECT lastname, COUNT(DISTINCT gender) OVER(ORDER BY lastname) " - + "FROM " + TestsConstants.TEST_INDEX_BANK, "jdbc")); - verifyDataRowsInOrder(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT lastname, COUNT(DISTINCT gender) OVER(ORDER BY lastname) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK, + "jdbc")); + verifyDataRowsInOrder( + response, rows("Adams", 1), rows("Ayala", 2), rows("Bates", 2), @@ -87,10 +106,15 @@ public void testDistinctCountOver() { @Test public void testDistinctCountPartition() { - JSONObject response = new JSONObject(executeQuery( - "SELECT lastname, COUNT(DISTINCT gender) OVER(PARTITION BY gender ORDER BY lastname) " - + "FROM " + TestsConstants.TEST_INDEX_BANK, "jdbc")); - verifyDataRowsInOrder(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT lastname, COUNT(DISTINCT gender) OVER(PARTITION BY gender ORDER BY" + + " lastname) FROM " + + TestsConstants.TEST_INDEX_BANK, + "jdbc")); + verifyDataRowsInOrder( + response, rows("Ayala", 1), rows("Bates", 1), rows("Mcpherson", 1), @@ -99,5 +123,4 @@ public void testDistinctCountPartition() { rows("Duke Willmington", 1), rows("Ratliff", 1)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java b/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java index 0a42dc83e3..39437ffc5f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java @@ -12,7 +12,7 @@ /** * ONLY USED FOR TEST PURPOSE. * - * Execute {@link AbstractPlan} on caller thread. + *
<p>
Execute {@link AbstractPlan} on caller thread. */ public class ExecuteOnCallerThreadQueryManager implements QueryManager { @Override diff --git a/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java b/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java index 57726089ae..0897a508e3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java @@ -9,9 +9,7 @@ import org.opensearch.client.RestClient; import org.opensearch.client.RestHighLevelClient; -/** - * Internal RestHighLevelClient only for testing purpose. - */ +/** Internal RestHighLevelClient only for testing purpose. */ public class InternalRestHighLevelClient extends RestHighLevelClient { public InternalRestHighLevelClient(RestClient restClient) { super(restClient, RestClient::close, Collections.emptyList()); diff --git a/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java b/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java index 4cb2aa299d..d444218c66 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.util; import static org.hamcrest.MatcherAssert.assertThat; @@ -45,16 +44,15 @@ public class MatcherUtils { /** * Assert field value in object by a custom matcher and getter to access the field. * - * @param name description + * @param name description * @param subMatcher sub-matcher for field - * @param getter getter function to access the field - * @param type of outer object - * @param type of inner field + * @param getter getter function to access the field + * @param type of outer object + * @param type of inner field * @return matcher */ - public static FeatureMatcher featureValueOf(String name, - Matcher subMatcher, - Function getter) { + public static FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { return new FeatureMatcher(subMatcher, name, name) { @Override protected U featureValueOf(T actual) { @@ -68,8 +66,8 @@ public static Matcher hits(Matcher... hitMatchers) { if (hitMatchers.length == 0) { return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); } - return featureValueOf("SearchHits", arrayContainingInAnyOrder(hitMatchers), - SearchHits::getHits); + return featureValueOf( + "SearchHits", arrayContainingInAnyOrder(hitMatchers), SearchHits::getHits); } @SafeVarargs @@ -92,14 +90,17 @@ public static Matcher> kv(String key, Object value) { } public static Matcher hitAny(String query, Matcher... matcher) { - return featureValueOf("SearchHits", hasItems(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query(query)); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; - }); + return featureValueOf( + "SearchHits", + hasItems(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query(query)); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; + }); } public static Matcher hitAny(Matcher... matcher) { @@ -107,14 +108,17 @@ public static Matcher hitAny(Matcher... matcher) { } public static Matcher hitAll(Matcher... 
matcher) { - return featureValueOf("SearchHits", containsInAnyOrder(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query("/hits/hits")); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; - }); + return featureValueOf( + "SearchHits", + containsInAnyOrder(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query("/hits/hits")); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; + }); } public static Matcher kvString(String key, Matcher matcher) { @@ -122,7 +126,8 @@ public static Matcher kvString(String key, Matcher matcher) } public static Matcher kvDouble(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> ((BigDecimal) actual.query(key)).doubleValue()); + return featureValueOf( + "Json Match", matcher, actual -> ((BigDecimal) actual.query(key)).doubleValue()); } public static Matcher kvInt(String key, Matcher matcher) { @@ -196,19 +201,18 @@ public static void verifyOrder(JSONArray array, Matcher... matchers) { assertThat(objects, containsInRelativeOrder(matchers)); } - public static TypeSafeMatcher schema(String expectedName, - String expectedType) { + public static TypeSafeMatcher schema(String expectedName, String expectedType) { return schema(expectedName, null, expectedType); } - public static TypeSafeMatcher schema(String expectedName, String expectedAlias, - String expectedType) { + public static TypeSafeMatcher schema( + String expectedName, String expectedAlias, String expectedType) { return new TypeSafeMatcher() { @Override public void describeTo(Description description) { description.appendText( - String - .format("(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); + String.format( + "(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); } @Override @@ -216,9 +220,9 @@ protected boolean matchesSafely(JSONObject jsonObject) { String actualName = (String) jsonObject.query("/name"); String actualAlias = (String) jsonObject.query("/alias"); String actualType = (String) jsonObject.query("/type"); - return expectedName.equals(actualName) && - (Strings.isNullOrEmpty(expectedAlias) || expectedAlias.equals(actualAlias)) && - expectedType.equals(actualType); + return expectedName.equals(actualName) + && (Strings.isNullOrEmpty(expectedAlias) || expectedAlias.equals(actualAlias)) + && expectedType.equals(actualType); } }; } @@ -288,10 +292,7 @@ public void describeTo(Description description) { }; } - - /** - * Tests if a string is equal to another string, ignore the case and whitespace. - */ + /** Tests if a string is equal to another string, ignore the case and whitespace. */ public static class IsEqualIgnoreCaseAndWhiteSpace extends TypeSafeMatcher { private final String string; @@ -314,7 +315,8 @@ public void describeMismatchSafely(String item, Description mismatchDescription) @Override public void describeTo(Description description) { - description.appendText("a string equal to ") + description + .appendText("a string equal to ") .appendValue(string) .appendText(" ignore case and white space"); } @@ -334,13 +336,11 @@ public static Matcher equalToIgnoreCaseAndWhiteSpace(String expectedStri /** * Compare two JSON string are equals. + * * @param expected expected JSON string. * @param actual actual JSON string. 
*/ public static void assertJsonEquals(String expected, String actual) { - assertEquals( - JsonParser.parseString(expected), - JsonParser.parseString(actual) - ); + assertEquals(JsonParser.parseString(expected), JsonParser.parseString(actual)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java b/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java index ad8afc47ca..5d6f0b5a55 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java @@ -37,8 +37,8 @@ import org.opensearch.sql.storage.StorageEngine; /** - * A utility class which registers SQL engine singletons as `OpenSearchPluginModule` does. - * It is needed to get access to those instances in test and validate their behavior. + * A utility class which registers SQL engine singletons as `OpenSearchPluginModule` does. It is + * needed to get access to those instances in test and validate their behavior. */ @RequiredArgsConstructor public class StandaloneModule extends AbstractModule { @@ -53,8 +53,7 @@ public class StandaloneModule extends AbstractModule { BuiltinFunctionRepository.getInstance(); @Override - protected void configure() { - } + protected void configure() {} @Provides public OpenSearchClient openSearchClient() { @@ -67,8 +66,8 @@ public StorageEngine storageEngine(OpenSearchClient client) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, - PlanSerializer planSerializer) { + public ExecutionEngine executionEngine( + OpenSearchClient client, ExecutionProtector protector, PlanSerializer planSerializer) { return new OpenSearchExecutionEngine(client, protector, planSerializer); } diff --git a/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java b/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java index 3281c172cb..589fb1f9ae 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.util; import static com.google.common.base.Strings.isNullOrEmpty; @@ -44,9 +43,9 @@ public class TestUtils { /** * Create test index by REST client. * - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ public static void createIndexByRestClient(RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); @@ -57,16 +56,16 @@ public static void createIndexByRestClient(RestClient client, String indexName, } /** - * https://github.com/elastic/elasticsearch/pull/49959 - * Deprecate creation of dot-prefixed index names except for hidden and system indices. - * Create hidden index by REST client. + * https://github.com/elastic/elasticsearch/pull/49959
<br>
+ * Deprecate creation of dot-prefixed index names except for hidden and system indices. Create + * hidden index by REST client. * - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ - public static void createHiddenIndexByRestClient(RestClient client, String indexName, - String mapping) { + public static void createHiddenIndexByRestClient( + RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); JSONObject jsonObject = isNullOrEmpty(mapping) ? new JSONObject() : new JSONObject(mapping); jsonObject.put("settings", new JSONObject("{\"index\":{\"hidden\":true}}")); @@ -76,11 +75,11 @@ public static void createHiddenIndexByRestClient(RestClient client, String index } /** - * Check if index already exists by OpenSearch index exists API which returns: - * 200 - specified indices or aliases exist + * Check if index already exists by OpenSearch index exists API which returns:
<br>
+ * 200 - specified indices or aliases exist
<br>
* 404 - one or more indices specified or aliases do not exist * - * @param client client connection + * @param client client connection * @param indexName index name * @return true for index exist */ @@ -96,13 +95,13 @@ public static boolean isIndexExist(RestClient client, String indexName) { /** * Load test data set by REST client. * - * @param client client connection - * @param indexName index name + * @param client client connection + * @param indexName index name * @param dataSetFilePath file path of test data set * @throws IOException */ - public static void loadDataByRestClient(RestClient client, String indexName, - String dataSetFilePath) throws IOException { + public static void loadDataByRestClient( + RestClient client, String indexName, String dataSetFilePath) throws IOException { Path path = Paths.get(getResourceFilePath(dataSetFilePath)); Request request = new Request("POST", "/" + indexName + "/_bulk?refresh=true"); request.setJsonEntity(new String(Files.readAllBytes(path))); @@ -112,7 +111,7 @@ public static void loadDataByRestClient(RestClient client, String indexName, /** * Perform a request by REST client. * - * @param client client connection + * @param client client connection * @param request request object */ public static Response performRequest(RestClient client, Request request) { @@ -129,566 +128,567 @@ public static Response performRequest(RestClient client, Request request) { } public static String getAccountIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"address\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"firstname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }," + - " \"lastname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }," + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"address\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"firstname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }," + + " \"lastname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }," + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; } public static String getPhraseIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"phrase\": {\n" + - " \"type\": \"text\",\n" + - " \"store\": true\n" + - " }" + - " }" + - " }" + - "}"; + 
return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"phrase\": {\n" + + " \"type\": \"text\",\n" + + " \"store\": true\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDogIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"dog_name\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"dog_name\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDogs2IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"dog_name\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"holdersName\": {\n" + - " \"type\": \"keyword\"\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"dog_name\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"holdersName\": {\n" + + " \"type\": \"keyword\"\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDogs3IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"holdersName\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"color\": {\n" + - " \"type\": \"text\"\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"holdersName\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"color\": {\n" + + " \"type\": \"text\"\n" + + " }" + + " }" + + " }" + + "}"; } public static String getPeople2IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"firstname\": {\n" + - " \"type\": \"keyword\"\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"firstname\": {\n" + + " \"type\": \"keyword\"\n" + + " }" + + " }" + + " }" + + "}"; } public static String getGameOfThronesIndexMapping() { - return "{ \"mappings\": { " + - " \"properties\": {\n" + - " \"nickname\": {\n" + - " \"type\":\"text\", " + - " \"fielddata\":true" + - " },\n" + - " \"name\": {\n" + - " \"properties\": {\n" + - " \"firstname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"ofHerName\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"ofHisName\": {\n" + - " \"type\": \"integer\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"house\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }" + - "} } }"; + return "{ \"mappings\": { " + + " \"properties\": {\n" + + " \"nickname\": {\n" + + " \"type\":\"text\", " + + " \"fielddata\":true" + + " },\n" + + " \"name\": {\n" + + " \"properties\": {\n" + + " \"firstname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"ofHerName\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"ofHisName\": {\n" + + " \"type\": \"integer\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"house\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + 
" }\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }" + + "} } }"; } // System public static String getOdbcIndexMapping() { - return "{\n" + - "\t\"mappings\" :{\n" + - "\t\t\"properties\":{\n" + - "\t\t\t\"odbc_time\":{\n" + - "\t\t\t\t\"type\":\"date\",\n" + - "\t\t\t\t\"format\": \"'{ts' ''yyyy-MM-dd HH:mm:ss.SSS'''}'\"\n" + - "\t\t\t},\n" + - "\t\t\t\"docCount\":{\n" + - "\t\t\t\t\"type\":\"text\"\n" + - "\t\t\t}\n" + - "\t\t}\n" + - "\t}\n" + - "}"; + return "{\n" + + "\t\"mappings\" :{\n" + + "\t\t\"properties\":{\n" + + "\t\t\t\"odbc_time\":{\n" + + "\t\t\t\t\"type\":\"date\",\n" + + "\t\t\t\t\"format\": \"'{ts' ''yyyy-MM-dd HH:mm:ss.SSS'''}'\"\n" + + "\t\t\t},\n" + + "\t\t\t\"docCount\":{\n" + + "\t\t\t\t\"type\":\"text\"\n" + + "\t\t\t}\n" + + "\t\t}\n" + + "\t}\n" + + "}"; } public static String getLocationIndexMapping() { - return "{\n" + - "\t\"mappings\" :{\n" + - "\t\t\"properties\":{\n" + - "\t\t\t\"place\":{\n" + - "\t\t\t\t\"type\":\"geo_shape\"\n" + - //"\t\t\t\t\"tree\": \"quadtree\",\n" + // Field tree and precision are deprecated in OpenSearch - //"\t\t\t\t\"precision\": \"10km\"\n" + - "\t\t\t},\n" + - "\t\t\t\"center\":{\n" + - "\t\t\t\t\"type\":\"geo_point\"\n" + - "\t\t\t},\n" + - "\t\t\t\"description\":{\n" + - "\t\t\t\t\"type\":\"text\"\n" + - "\t\t\t}\n" + - "\t\t}\n" + - "\t}\n" + - "}"; + return "{\n" + + "\t\"mappings\" :{\n" + + "\t\t\"properties\":{\n" + + "\t\t\t\"place\":{\n" + + "\t\t\t\t\"type\":\"geo_shape\"\n" + + + // "\t\t\t\t\"tree\": \"quadtree\",\n" + // Field tree and precision are deprecated in + // OpenSearch + // "\t\t\t\t\"precision\": \"10km\"\n" + + "\t\t\t},\n" + + "\t\t\t\"center\":{\n" + + "\t\t\t\t\"type\":\"geo_point\"\n" + + "\t\t\t},\n" + + "\t\t\t\"description\":{\n" + + "\t\t\t\t\"type\":\"text\"\n" + + "\t\t\t}\n" + + "\t\t}\n" + + "\t}\n" + + "}"; } public static String getEmployeeNestedTypeIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"comments\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"date\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"likes\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"message\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"address\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"city\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " 
\"started_year\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"title\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}\n"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"comments\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"date\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"likes\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"message\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"address\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"city\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"title\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; } - public static String getNestedTypeIndexMapping() { - return "{ \"mappings\": {\n" + - " \"properties\": {\n" + - " \"message\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"info\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"author\": {\n" + - " \"type\": \"keyword\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\" : 256\n" + - " }\n" + - " },\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"dayOfWeek\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"comment\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"data\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"likes\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"myNum\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"someField\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }}"; + return "{ \"mappings\": {\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"info\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"author\": {\n" + + " \"type\": \"keyword\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": 
\"keyword\",\n" + + " \"ignore_above\" : 256\n" + + " }\n" + + " },\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"dayOfWeek\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"comment\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"data\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"likes\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"myNum\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"someField\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }}"; } public static String getJoinTypeIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"join_field\": {\n" + - " \"type\": \"join\",\n" + - " \"relations\": {\n" + - " \"parentType\": \"childrenType\"\n" + - " }\n" + - " },\n" + - " \"parentTile\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"dayOfWeek\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"author\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"info\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"join_field\": {\n" + + " \"type\": \"join\",\n" + + " \"relations\": {\n" + + " \"parentType\": \"childrenType\"\n" + + " }\n" + + " },\n" + + " \"parentTile\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"dayOfWeek\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"author\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"info\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getBankIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"birthdate\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"email\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"male\": {\n" + - " \"type\": \"boolean\"\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"birthdate\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"email\": {\n" + + " \"type\": 
\"text\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"male\": {\n" + + " \"type\": \"boolean\"\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getBankWithNullValuesIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getOrderIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getWeblogsIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"host\": {\n" + - " \"type\": \"ip\"\n" + - " },\n" + - " \"method\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"url\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"response\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"bytes\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"host\": {\n" + + " \"type\": \"ip\"\n" + + " },\n" + + " \"method\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"url\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"response\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"bytes\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getDateIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"date_keyword\": {\n" + - " \"type\": \"keyword\",\n" + - 
" \"ignore_above\": 256\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"date_keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDateTimeIndexMapping() { - return "{" + - " \"mappings\": {" + - " \"properties\": {" + - " \"birthday\": {" + - " \"type\": \"date\"" + - " }" + - " }" + - " }" + - "}"; + return "{" + + " \"mappings\": {" + + " \"properties\": {" + + " \"birthday\": {" + + " \"type\": \"date\"" + + " }" + + " }" + + " }" + + "}"; } public static String getNestedSimpleIndexMapping() { - return "{" + - " \"mappings\": {" + - " \"properties\": {" + - " \"address\": {" + - " \"type\": \"nested\"," + - " \"properties\": {" + - " \"city\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }," + - " \"state\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }" + - " }" + - " }," + - " \"age\": {" + - " \"type\": \"long\"" + - " }," + - " \"id\": {" + - " \"type\": \"long\"" + - " }," + - " \"name\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }" + - " }" + - " }" + - "}"; + return "{" + + " \"mappings\": {" + + " \"properties\": {" + + " \"address\": {" + + " \"type\": \"nested\"," + + " \"properties\": {" + + " \"city\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }," + + " \"state\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }" + + " }" + + " }," + + " \"age\": {" + + " \"type\": \"long\"" + + " }," + + " \"id\": {" + + " \"type\": \"long\"" + + " }," + + " \"name\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; } public static void loadBulk(Client client, String jsonPath, String defaultIndex) @@ -698,8 +698,8 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkRequest bulkRequest = new BulkRequest(); try (final InputStream stream = new FileInputStream(absJsonPath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { while (true) { @@ -728,8 +728,11 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); if (bulkResponse.hasFailures()) { - throw new Exception("Failed to load test data into index " + defaultIndex + ", " + - bulkResponse.buildFailureMessage()); + throw new Exception( + "Failed to load test data into index " + + defaultIndex + + ", " + + bulkResponse.buildFailureMessage()); } System.out.println(bulkResponse.getItems().length + " documents loaded."); // ensure the documents are searchable @@ -755,8 +758,8 @@ public static String getResponseBody(Response response, boolean 
retainNewLines) final StringBuilder sb = new StringBuilder(); try (final InputStream is = response.getEntity().getContent(); - final BufferedReader br = new BufferedReader( - new InputStreamReader(is, StandardCharsets.UTF_8))) { + final BufferedReader br = + new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { @@ -769,15 +772,14 @@ public static String getResponseBody(Response response, boolean retainNewLines) return sb.toString(); } - public static String fileToString(final String filePathFromProjectRoot, - final boolean removeNewLines) - throws IOException { + public static String fileToString( + final String filePathFromProjectRoot, final boolean removeNewLines) throws IOException { final String absolutePath = getResourceFilePath(filePathFromProjectRoot); try (final InputStream stream = new FileInputStream(absolutePath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { final StringBuilder stringBuilder = new StringBuilder(); String line = br.readLine(); @@ -831,36 +833,41 @@ public static List> getPermutations(final List items) { } final String currentItem = items.get(i); - result.addAll(getPermutations(smallerSet).stream().map(smallerSetPermutation -> { - final List permutation = new ArrayList<>(); - permutation.add(currentItem); - permutation.addAll(smallerSetPermutation); - return permutation; - }).collect(Collectors.toCollection(LinkedList::new))); + result.addAll( + getPermutations(smallerSet).stream() + .map( + smallerSetPermutation -> { + final List permutation = new ArrayList<>(); + permutation.add(currentItem); + permutation.addAll(smallerSetPermutation); + return permutation; + }) + .collect(Collectors.toCollection(LinkedList::new))); } return result; } public static void verifyIsV1Cursor(JSONObject response) { - var legacyCursorPrefixes = Arrays.stream(CursorType.values()) - .map(c -> c.getId() + ":").collect(Collectors.toList()); + var legacyCursorPrefixes = + Arrays.stream(CursorType.values()).map(c -> c.getId() + ":").collect(Collectors.toList()); verifyCursor(response, legacyCursorPrefixes, "v1"); } - public static void verifyIsV2Cursor(JSONObject response) { verifyCursor(response, List.of(CURSOR_PREFIX), "v2"); } - private static void verifyCursor(JSONObject response, List validCursorPrefix, String engineName) { - assertTrue("'cursor' property does not exist", response.has("cursor")); + private static void verifyCursor( + JSONObject response, List validCursorPrefix, String engineName) { + assertTrue("'cursor' property does not exist", response.has("cursor")); - var cursor = response.getString("cursor"); - assertFalse("'cursor' property is empty", cursor.isEmpty()); - assertTrue("The cursor '" + cursor.substring(0, 50) + "...' is not from " + engineName + " engine.", - validCursorPrefix.stream().anyMatch(cursor::startsWith)); - } + var cursor = response.getString("cursor"); + assertFalse("'cursor' property is empty", cursor.isEmpty()); + assertTrue( + "The cursor '" + cursor.substring(0, 50) + "...' 
is not from " + engineName + " engine.", + validCursorPrefix.stream().anyMatch(cursor::startsWith)); + } public static void verifyNoCursor(JSONObject response) { assertTrue(!response.has("cursor")); diff --git a/plugin/build.gradle b/plugin/build.gradle index 11f97ea857..8ec6844bfd 100644 --- a/plugin/build.gradle +++ b/plugin/build.gradle @@ -85,6 +85,9 @@ publishing { } } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + javadoc.enabled = false loggerUsageCheck.enabled = false dependencyLicenses.enabled = false diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java index 5e156c2f5d..f20de87d61 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java @@ -94,10 +94,10 @@ public class SQLPlugin extends Plugin implements ActionPlugin, ScriptPlugin { private static final Logger LOGGER = LogManager.getLogger(SQLPlugin.class); private ClusterService clusterService; - /** - * Settings should be inited when bootstrap the plugin. - */ + + /** Settings should be inited when bootstrap the plugin. */ private org.opensearch.sql.common.setting.Settings pluginSettings; + private NodeClient client; private DataSourceServiceImpl dataSourceService; private Injector injector; @@ -134,23 +134,28 @@ public List getRestHandlers( new RestDataSourceQueryAction()); } - /** - * Register action and handler so that transportClient can find proxy for action. - */ + /** Register action and handler so that transportClient can find proxy for action. */ @Override public List> getActions() { return Arrays.asList( new ActionHandler<>( new ActionType<>(PPLQueryAction.NAME, TransportPPLQueryResponse::new), TransportPPLQueryAction.class), - new ActionHandler<>(new ActionType<>(TransportCreateDataSourceAction.NAME, - CreateDataSourceActionResponse::new), TransportCreateDataSourceAction.class), - new ActionHandler<>(new ActionType<>(TransportGetDataSourceAction.NAME, - GetDataSourceActionResponse::new), TransportGetDataSourceAction.class), - new ActionHandler<>(new ActionType<>(TransportUpdateDataSourceAction.NAME, - UpdateDataSourceActionResponse::new), TransportUpdateDataSourceAction.class), - new ActionHandler<>(new ActionType<>(TransportDeleteDataSourceAction.NAME, - DeleteDataSourceActionResponse::new), TransportDeleteDataSourceAction.class)); + new ActionHandler<>( + new ActionType<>( + TransportCreateDataSourceAction.NAME, CreateDataSourceActionResponse::new), + TransportCreateDataSourceAction.class), + new ActionHandler<>( + new ActionType<>(TransportGetDataSourceAction.NAME, GetDataSourceActionResponse::new), + TransportGetDataSourceAction.class), + new ActionHandler<>( + new ActionType<>( + TransportUpdateDataSourceAction.NAME, UpdateDataSourceActionResponse::new), + TransportUpdateDataSourceAction.class), + new ActionHandler<>( + new ActionType<>( + TransportDeleteDataSourceAction.NAME, DeleteDataSourceActionResponse::new), + TransportDeleteDataSourceAction.class)); } @Override @@ -176,11 +181,12 @@ public Collection createComponents( ModulesBuilder modules = new ModulesBuilder(); modules.add(new OpenSearchPluginModule()); - modules.add(b -> { - b.bind(NodeClient.class).toInstance((NodeClient) client); - b.bind(org.opensearch.sql.common.setting.Settings.class).toInstance(pluginSettings); - b.bind(DataSourceService.class).toInstance(dataSourceService); - }); + modules.add( + b -> { + 
b.bind(NodeClient.class).toInstance((NodeClient) client); + b.bind(org.opensearch.sql.common.setting.Settings.class).toInstance(pluginSettings); + b.bind(DataSourceService.class).toInstance(dataSourceService); + }); injector = modules.createInjector(); return ImmutableList.of(dataSourceService); @@ -212,30 +218,31 @@ public ScriptEngine getScriptEngine(Settings settings, Collection() - .add(new OpenSearchDataSourceFactory( - new OpenSearchNodeClient(this.client), pluginSettings)) + .add( + new OpenSearchDataSourceFactory( + new OpenSearchNodeClient(this.client), pluginSettings)) .add(new PrometheusStorageFactory(pluginSettings)) .add(new SparkStorageFactory(this.client, pluginSettings)) .build(), dataSourceMetadataStorage, dataSourceUserAuthorizationHelper); } - } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java b/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java index f301a242fb..33a785c498 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java @@ -45,8 +45,7 @@ public class OpenSearchPluginModule extends AbstractModule { BuiltinFunctionRepository.getInstance(); @Override - protected void configure() { - } + protected void configure() {} @Provides public OpenSearchClient openSearchClient(NodeClient nodeClient) { @@ -59,8 +58,8 @@ public StorageEngine storageEngine(OpenSearchClient client, Settings settings) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, - PlanSerializer planSerializer) { + public ExecutionEngine executionEngine( + OpenSearchClient client, ExecutionProtector protector, PlanSerializer planSerializer) { return new OpenSearchExecutionEngine(client, protector, planSerializer); } @@ -95,18 +94,15 @@ public SQLService sqlService(QueryManager queryManager, QueryPlanFactory queryPl return new SQLService(new SQLSyntaxParser(), queryManager, queryPlanFactory); } - /** - * {@link QueryPlanFactory}. - */ + /** {@link QueryPlanFactory}. */ @Provides - public QueryPlanFactory queryPlanFactory(DataSourceService dataSourceService, - ExecutionEngine executionEngine) { + public QueryPlanFactory queryPlanFactory( + DataSourceService dataSourceService, ExecutionEngine executionEngine) { Analyzer analyzer = new Analyzer( new ExpressionAnalyzer(functionRepository), dataSourceService, functionRepository); Planner planner = new Planner(LogicalPlanOptimizer.create()); - QueryService queryService = new QueryService( - analyzer, executionEngine, planner); + QueryService queryService = new QueryService(analyzer, executionEngine, planner); return new QueryPlanFactory(queryService); } } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java b/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java index 730da0e923..ad734bf150 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.plugin.request; import java.util.Map; @@ -15,9 +14,7 @@ import org.opensearch.sql.protocol.response.format.Format; import org.opensearch.sql.protocol.response.format.JsonResponseFormatter; -/** - * Factory of {@link PPLQueryRequest}. - */ +/** Factory of {@link PPLQueryRequest}. 
*/ public class PPLQueryRequestFactory { private static final String PPL_URL_PARAM_KEY = "ppl"; private static final String PPL_FIELD_NAME = "query"; @@ -28,6 +25,7 @@ public class PPLQueryRequestFactory { /** * Build {@link PPLQueryRequest} from {@link RestRequest}. + * * @param request {@link PPLQueryRequest} * @return {@link RestRequest} */ @@ -63,8 +61,12 @@ private static PPLQueryRequest parsePPLRequestFromPayload(RestRequest restReques } catch (JSONException e) { throw new IllegalArgumentException("Failed to parse request payload", e); } - PPLQueryRequest pplRequest = new PPLQueryRequest(jsonContent.getString(PPL_FIELD_NAME), - jsonContent, restRequest.path(), format.getFormatName()); + PPLQueryRequest pplRequest = + new PPLQueryRequest( + jsonContent.getString(PPL_FIELD_NAME), + jsonContent, + restRequest.path(), + format.getFormatName()); // set sanitize option if csv format if (format.equals(Format.CSV)) { pplRequest.sanitize(getSanitizeOption(restRequest.params())); diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java index 55f8dfdfef..996ae8c700 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java @@ -102,14 +102,17 @@ protected Set responseParams() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient nodeClient) { // TODO: need move to transport Action if (!pplEnabled.get()) { - return channel -> reportError(channel, new IllegalAccessException( - "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is false"), - BAD_REQUEST); + return channel -> + reportError( + channel, + new IllegalAccessException( + "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is" + + " false"), + BAD_REQUEST); } - TransportPPLQueryRequest transportPPLQueryRequest = new TransportPPLQueryRequest( - PPLQueryRequestFactory.getPPLRequest(request) - ); + TransportPPLQueryRequest transportPPLQueryRequest = + new TransportPPLQueryRequest(PPLQueryRequestFactory.getPPLRequest(request)); return channel -> nodeClient.execute( diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java index ef9f68a2a7..7a51fc282b 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.plugin.rest; import static org.opensearch.core.rest.RestStatus.SERVICE_UNAVAILABLE; @@ -26,17 +25,14 @@ import org.opensearch.sql.legacy.executor.format.ErrorMessageFactory; import org.opensearch.sql.legacy.metrics.Metrics; -/** - * PPL Node level status. - */ +/** PPL Node level status. */ public class RestPPLStatsAction extends BaseRestHandler { private static final Logger LOG = LogManager.getLogger(RestPPLStatsAction.class); - /** - * API endpoint path. - */ + /** API endpoint path. 
*/ public static final String PPL_STATS_API_ENDPOINT = "/_plugins/_ppl/stats"; + public static final String PPL_LEGACY_STATS_API_ENDPOINT = "/_opendistro/_ppl/stats"; public RestPPLStatsAction(Settings settings, RestController restController) { @@ -70,13 +66,18 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli QueryContext.addRequestId(); try { - return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.OK, - Metrics.getInstance().collectToJSON())); + return channel -> + channel.sendResponse( + new BytesRestResponse(RestStatus.OK, Metrics.getInstance().collectToJSON())); } catch (Exception e) { LOG.error("Failed during Query PPL STATS Action.", e); - return channel -> channel.sendResponse(new BytesRestResponse(SERVICE_UNAVAILABLE, - ErrorMessageFactory.createErrorMessage(e, SERVICE_UNAVAILABLE.getStatus()).toString())); + return channel -> + channel.sendResponse( + new BytesRestResponse( + SERVICE_UNAVAILABLE, + ErrorMessageFactory.createErrorMessage(e, SERVICE_UNAVAILABLE.getStatus()) + .toString())); } } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java index b15b4dddd6..885c953c17 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java @@ -39,9 +39,14 @@ public class RestQuerySettingsAction extends BaseRestHandler { private static final String LEGACY_SQL_SETTINGS_PREFIX = "opendistro.sql."; private static final String LEGACY_PPL_SETTINGS_PREFIX = "opendistro.ppl."; private static final String LEGACY_COMMON_SETTINGS_PREFIX = "opendistro.query."; - private static final List SETTINGS_PREFIX = ImmutableList.of( - SQL_SETTINGS_PREFIX, PPL_SETTINGS_PREFIX, COMMON_SETTINGS_PREFIX, - LEGACY_SQL_SETTINGS_PREFIX, LEGACY_PPL_SETTINGS_PREFIX, LEGACY_COMMON_SETTINGS_PREFIX); + private static final List SETTINGS_PREFIX = + ImmutableList.of( + SQL_SETTINGS_PREFIX, + PPL_SETTINGS_PREFIX, + COMMON_SETTINGS_PREFIX, + LEGACY_SQL_SETTINGS_PREFIX, + LEGACY_PPL_SETTINGS_PREFIX, + LEGACY_COMMON_SETTINGS_PREFIX); public static final String SETTINGS_API_ENDPOINT = "/_plugins/_query/settings"; public static final String LEGACY_SQL_SETTINGS_API_ENDPOINT = "/_opendistro/_sql/settings"; @@ -75,10 +80,11 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli QueryContext.addRequestId(); final ClusterUpdateSettingsRequest clusterUpdateSettingsRequest = Requests.clusterUpdateSettingsRequest(); - clusterUpdateSettingsRequest.timeout(request.paramAsTime( - "timeout", clusterUpdateSettingsRequest.timeout())); - clusterUpdateSettingsRequest.clusterManagerNodeTimeout(request.paramAsTime( - "cluster_manager_timeout", clusterUpdateSettingsRequest.clusterManagerNodeTimeout())); + clusterUpdateSettingsRequest.timeout( + request.paramAsTime("timeout", clusterUpdateSettingsRequest.timeout())); + clusterUpdateSettingsRequest.clusterManagerNodeTimeout( + request.paramAsTime( + "cluster_manager_timeout", clusterUpdateSettingsRequest.clusterManagerNodeTimeout())); Map source; try (XContentParser parser = request.contentParser()) { source = parser.map(); @@ -86,20 +92,27 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli try { if (source.containsKey(TRANSIENT)) { - clusterUpdateSettingsRequest.transientSettings(getAndFilterSettings( - (Map) source.get(TRANSIENT))); + 
clusterUpdateSettingsRequest.transientSettings( + getAndFilterSettings((Map) source.get(TRANSIENT))); } if (source.containsKey(PERSISTENT)) { - clusterUpdateSettingsRequest.persistentSettings(getAndFilterSettings( - (Map) source.get(PERSISTENT))); + clusterUpdateSettingsRequest.persistentSettings( + getAndFilterSettings((Map) source.get(PERSISTENT))); } - return channel -> client.admin().cluster().updateSettings( - clusterUpdateSettingsRequest, new RestToXContentListener<>(channel)); + return channel -> + client + .admin() + .cluster() + .updateSettings(clusterUpdateSettingsRequest, new RestToXContentListener<>(channel)); } catch (Exception e) { LOG.error("Error changing OpenSearch SQL plugin cluster settings", e); - return channel -> channel.sendResponse(new BytesRestResponse(INTERNAL_SERVER_ERROR, - ErrorMessageFactory.createErrorMessage(e, INTERNAL_SERVER_ERROR.getStatus()).toString())); + return channel -> + channel.sendResponse( + new BytesRestResponse( + INTERNAL_SERVER_ERROR, + ErrorMessageFactory.createErrorMessage(e, INTERNAL_SERVER_ERROR.getStatus()) + .toString())); } } @@ -107,16 +120,19 @@ private Settings getAndFilterSettings(Map source) { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - Settings.Builder settingsBuilder = Settings.builder() - .loadFromSource(builder.toString(), builder.contentType()); - settingsBuilder.keys().removeIf(key -> { - for (String prefix : SETTINGS_PREFIX) { - if (key.startsWith(prefix)) { - return false; - } - } - return true; - }); + Settings.Builder settingsBuilder = + Settings.builder().loadFromSource(builder.toString(), builder.contentType()); + settingsBuilder + .keys() + .removeIf( + key -> { + for (String prefix : SETTINGS_PREFIX) { + if (key.startsWith(prefix)) { + return false; + } + } + return true; + }); return settingsBuilder.build(); } catch (IOException e) { throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e); diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java index 8a9d276673..fde9e24f75 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java @@ -139,8 +139,8 @@ private ResponseListener createListener( @Override public void onResponse(ExecutionEngine.QueryResponse response) { String responseContent = - formatter.format(new QueryResult(response.getSchema(), response.getResults(), - response.getCursor())); + formatter.format( + new QueryResult(response.getSchema(), response.getResults(), response.getCursor())); listener.onResponse(new TransportPPLQueryResponse(responseContent)); } diff --git a/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java b/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java index 0e5d99ae35..286ac20fed 100644 --- a/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java +++ b/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java @@ -59,9 +59,7 @@ public void writeTo(StreamOutput out) throws IOException { @Test public void testCustomizedNullJSONContentActionRequestFromActionRequest() { - TransportPPLQueryRequest request = new TransportPPLQueryRequest( - "source=t a=1", null, null - ); + TransportPPLQueryRequest request = new 
TransportPPLQueryRequest("source=t a=1", null, null); ActionRequest actionRequest = new ActionRequest() { @Override diff --git a/ppl/build.gradle b/ppl/build.gradle index 484934ddc3..e16b6decfc 100644 --- a/ppl/build.gradle +++ b/ppl/build.gradle @@ -29,6 +29,11 @@ plugins { id 'antlr' } +// Being ignored as a temporary measure before being removed in favour of +// spotless https://github.com/opensearch-project/sql/issues/1101 +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + generateGrammarSource { arguments += ['-visitor', '-package', 'org.opensearch.sql.ppl.antlr.parser'] source = sourceSets.main.antlr diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java b/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java index 40a7a85f78..7769f5dfae 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.executor.ExecutionEngine.QueryResponse; @@ -27,9 +26,7 @@ import org.opensearch.sql.ppl.parser.AstStatementBuilder; import org.opensearch.sql.ppl.utils.PPLQueryDataAnonymizer; -/** - * PPLService. - */ +/** PPLService. */ @RequiredArgsConstructor public class PPLService { private final PPLSyntaxParser parser; @@ -45,7 +42,7 @@ public class PPLService { /** * Execute the {@link PPLQueryRequest}, using {@link ResponseListener} to get response. * - * @param request {@link PPLQueryRequest} + * @param request {@link PPLQueryRequest} * @param listener {@link ResponseListener} */ public void execute(PPLQueryRequest request, ResponseListener listener) { @@ -57,10 +54,10 @@ public void execute(PPLQueryRequest request, ResponseListener lis } /** - * Explain the query in {@link PPLQueryRequest} using {@link ResponseListener} to - * get and format explain response. + * Explain the query in {@link PPLQueryRequest} using {@link ResponseListener} to get and format + * explain response. * - * @param request {@link PPLQueryRequest} + * @param request {@link PPLQueryRequest} * @param listener {@link ResponseListener} for explain response */ public void explain(PPLQueryRequest request, ResponseListener listener) { @@ -90,7 +87,6 @@ private AbstractPlan plan( QueryContext.getRequestId(), anonymizer.anonymizeStatement(statement)); - return queryExecutionFactory.create( - statement, queryListener, explainListener); + return queryExecutionFactory.create(statement, queryListener, explainListener); } } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java b/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java index 168ba33a8a..1d4485e749 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.antlr; import org.antlr.v4.runtime.CommonTokenStream; @@ -15,13 +14,9 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLLexer; import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser; -/** - * PPL Syntax Parser. - */ +/** PPL Syntax Parser. */ public class PPLSyntaxParser implements Parser { - /** - * Analyze the query syntax. - */ + /** Analyze the query syntax. 
*/ @Override public ParseTree parse(String query) { OpenSearchPPLParser parser = createParser(createLexer(query)); @@ -30,12 +25,10 @@ public ParseTree parse(String query) { } private OpenSearchPPLParser createParser(Lexer lexer) { - return new OpenSearchPPLParser( - new CommonTokenStream(lexer)); + return new OpenSearchPPLParser(new CommonTokenStream(lexer)); } private OpenSearchPPLLexer createLexer(String query) { - return new OpenSearchPPLLexer( - new CaseInsensitiveCharStream(query)); + return new OpenSearchPPLLexer(new CaseInsensitiveCharStream(query)); } } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java index 87532e01d0..ca351fcc0a 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; import java.util.Locale; @@ -22,12 +21,9 @@ public class PPLQueryRequest { public static final PPLQueryRequest NULL = new PPLQueryRequest("", null, DEFAULT_PPL_PATH, ""); private final String pplQuery; - @Getter - private final JSONObject jsonContent; - @Getter - private final String path; - @Getter - private String format = ""; + @Getter private final JSONObject jsonContent; + @Getter private final String path; + @Getter private String format = ""; @Setter @Getter @@ -43,9 +39,7 @@ public PPLQueryRequest(String pplQuery, JSONObject jsonContent, String path) { this(pplQuery, jsonContent, path, ""); } - /** - * Constructor of PPLQueryRequest. - */ + /** Constructor of PPLQueryRequest. */ public PPLQueryRequest(String pplQuery, JSONObject jsonContent, String path, String format) { this.pplQuery = pplQuery; this.jsonContent = jsonContent; @@ -59,23 +53,21 @@ public String getRequest() { /** * Check if request is to explain rather than execute the query. - * @return true if it is a explain request + * + * @return true if it is a explain request */ public boolean isExplainRequest() { return path.endsWith("/_explain"); } - /** - * Decide on the formatter by the requested format. - */ + /** Decide on the formatter by the requested format. 
*/ public Format format() { Optional optionalFormat = Format.of(format); if (optionalFormat.isPresent()) { return optionalFormat.get(); } else { throw new IllegalArgumentException( - String.format(Locale.ROOT,"response in %s format is not supported.", format)); + String.format(Locale.ROOT, "response in %s format is not supported.", format)); } } - } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java index 483726702a..5cae8e8f06 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java @@ -3,8 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; -public class PPLQueryResponse { -} +public class PPLQueryResponse {} diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java index 323f99a5af..3c693fa0bd 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static org.opensearch.sql.ast.dsl.AstDSL.qualifiedName; @@ -74,33 +73,25 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParserBaseVisitor; import org.opensearch.sql.ppl.utils.ArgumentFactory; -/** - * Class of building the AST. - * Refines the visit path and build the AST nodes - */ +/** Class of building the AST. Refines the visit path and build the AST nodes */ @RequiredArgsConstructor public class AstBuilder extends OpenSearchPPLParserBaseVisitor { private final AstExpressionBuilder expressionBuilder; /** - * PPL query to get original token text. This is necessary because token.getText() returns - * text without whitespaces or other characters discarded by lexer. + * PPL query to get original token text. This is necessary because token.getText() returns text + * without whitespaces or other characters discarded by lexer. */ private final String query; @Override public UnresolvedPlan visitQueryStatement(OpenSearchPPLParser.QueryStatementContext ctx) { UnresolvedPlan pplCommand = visit(ctx.pplCommands()); - return ctx.commands() - .stream() - .map(this::visit) - .reduce(pplCommand, (r, e) -> e.attach(r)); + return ctx.commands().stream().map(this::visit).reduce(pplCommand, (r, e) -> e.attach(r)); } - /** - * Search command. - */ + /** Search command. */ @Override public UnresolvedPlan visitSearchFrom(SearchFromContext ctx) { return visitFromClause(ctx.fromClause()); @@ -108,23 +99,22 @@ public UnresolvedPlan visitSearchFrom(SearchFromContext ctx) { @Override public UnresolvedPlan visitSearchFromFilter(SearchFromFilterContext ctx) { - return new Filter(internalVisitExpression(ctx.logicalExpression())).attach( - visit(ctx.fromClause())); + return new Filter(internalVisitExpression(ctx.logicalExpression())) + .attach(visit(ctx.fromClause())); } @Override public UnresolvedPlan visitSearchFilterFrom(SearchFilterFromContext ctx) { - return new Filter(internalVisitExpression(ctx.logicalExpression())).attach( - visit(ctx.fromClause())); + return new Filter(internalVisitExpression(ctx.logicalExpression())) + .attach(visit(ctx.fromClause())); } /** - * Describe command. - * Current logic separates table and metadata info about table by adding - * MAPPING_ODFE_SYS_TABLE as suffix. 
- * Even with the introduction of datasource and schema name in fully qualified table name, - * we do the same thing by appending MAPPING_ODFE_SYS_TABLE as syffix to the last part - * of qualified name. + * Describe command.
+ * Current logic separates table and metadata info about table by adding MAPPING_ODFE_SYS_TABLE as + * suffix. Even with the introduction of datasource and schema name in fully qualified table name, + * we do the same thing by appending MAPPING_ODFE_SYS_TABLE as syffix to the last part of + * qualified name. */ @Override public UnresolvedPlan visitDescribeCommand(DescribeCommandContext ctx) { @@ -135,63 +125,52 @@ public UnresolvedPlan visitDescribeCommand(DescribeCommandContext ctx) { return new Relation(new QualifiedName(parts)); } - /** - * Show command. - */ + /** Show command. */ @Override public UnresolvedPlan visitShowDataSourcesCommand( OpenSearchPPLParser.ShowDataSourcesCommandContext ctx) { return new Relation(qualifiedName(DATASOURCES_TABLE_NAME)); } - - /** - * Where command. - */ + /** Where command. */ @Override public UnresolvedPlan visitWhereCommand(WhereCommandContext ctx) { return new Filter(internalVisitExpression(ctx.logicalExpression())); } - /** - * Fields command. - */ + /** Fields command. */ @Override public UnresolvedPlan visitFieldsCommand(FieldsCommandContext ctx) { return new Project( - ctx.fieldList() - .fieldExpression() - .stream() + ctx.fieldList().fieldExpression().stream() .map(this::internalVisitExpression) .collect(Collectors.toList()), - ArgumentFactory.getArgumentList(ctx) - ); + ArgumentFactory.getArgumentList(ctx)); } - /** - * Rename command. - */ + /** Rename command. */ @Override public UnresolvedPlan visitRenameCommand(RenameCommandContext ctx) { return new Rename( - ctx.renameClasue() - .stream() - .map(ct -> new Map(internalVisitExpression(ct.orignalField), - internalVisitExpression(ct.renamedField))) - .collect(Collectors.toList()) - ); + ctx.renameClasue().stream() + .map( + ct -> + new Map( + internalVisitExpression(ct.orignalField), + internalVisitExpression(ct.renamedField))) + .collect(Collectors.toList())); } - /** - * Stats command. - */ + /** Stats command. */ @Override public UnresolvedPlan visitStatsCommand(StatsCommandContext ctx) { ImmutableList.Builder aggListBuilder = new ImmutableList.Builder<>(); for (OpenSearchPPLParser.StatsAggTermContext aggCtx : ctx.statsAggTerm()) { UnresolvedExpression aggExpression = internalVisitExpression(aggCtx.statsFunction()); - String name = aggCtx.alias == null ? getTextInQuery(aggCtx) : StringUtils - .unquoteIdentifier(aggCtx.alias.getText()); + String name = + aggCtx.alias == null + ? 
getTextInQuery(aggCtx) + : StringUtils.unquoteIdentifier(aggCtx.alias.getText()); Alias alias = new Alias(name, aggExpression); aggListBuilder.add(alias); } @@ -199,12 +178,16 @@ public UnresolvedPlan visitStatsCommand(StatsCommandContext ctx) { List groupList = Optional.ofNullable(ctx.statsByClause()) .map(OpenSearchPPLParser.StatsByClauseContext::fieldList) - .map(expr -> expr.fieldExpression().stream() - .map(groupCtx -> - (UnresolvedExpression) new Alias( - StringUtils.unquoteIdentifier(getTextInQuery(groupCtx)), - internalVisitExpression(groupCtx))) - .collect(Collectors.toList())) + .map( + expr -> + expr.fieldExpression().stream() + .map( + groupCtx -> + (UnresolvedExpression) + new Alias( + StringUtils.unquoteIdentifier(getTextInQuery(groupCtx)), + internalVisitExpression(groupCtx))) + .collect(Collectors.toList())) .orElse(Collections.emptyList()); UnresolvedExpression span = @@ -213,30 +196,23 @@ public UnresolvedPlan visitStatsCommand(StatsCommandContext ctx) { .map(this::internalVisitExpression) .orElse(null); - Aggregation aggregation = new Aggregation( - aggListBuilder.build(), - Collections.emptyList(), - groupList, - span, - ArgumentFactory.getArgumentList(ctx) - ); + Aggregation aggregation = + new Aggregation( + aggListBuilder.build(), + Collections.emptyList(), + groupList, + span, + ArgumentFactory.getArgumentList(ctx)); return aggregation; } - /** - * Dedup command. - */ + /** Dedup command. */ @Override public UnresolvedPlan visitDedupCommand(DedupCommandContext ctx) { - return new Dedupe( - ArgumentFactory.getArgumentList(ctx), - getFieldList(ctx.fieldList()) - ); + return new Dedupe(ArgumentFactory.getArgumentList(ctx), getFieldList(ctx.fieldList())); } - /** - * Head command visitor. - */ + /** Head command visitor. */ @Override public UnresolvedPlan visitHeadCommand(HeadCommandContext ctx) { Integer size = ctx.number != null ? Integer.parseInt(ctx.number.getText()) : 10; @@ -244,58 +220,46 @@ public UnresolvedPlan visitHeadCommand(HeadCommandContext ctx) { return new Head(size, from); } - /** - * Sort command. - */ + /** Sort command. */ @Override public UnresolvedPlan visitSortCommand(SortCommandContext ctx) { return new Sort( - ctx.sortbyClause() - .sortField() - .stream() + ctx.sortbyClause().sortField().stream() .map(sort -> (Field) internalVisitExpression(sort)) - .collect(Collectors.toList()) - ); + .collect(Collectors.toList())); } - /** - * Eval command. - */ + /** Eval command. */ @Override public UnresolvedPlan visitEvalCommand(EvalCommandContext ctx) { return new Eval( - ctx.evalClause() - .stream() + ctx.evalClause().stream() .map(ct -> (Let) internalVisitExpression(ct)) - .collect(Collectors.toList()) - ); + .collect(Collectors.toList())); } private List getGroupByList(ByClauseContext ctx) { - return ctx.fieldList().fieldExpression().stream().map(this::internalVisitExpression) + return ctx.fieldList().fieldExpression().stream() + .map(this::internalVisitExpression) .collect(Collectors.toList()); } private List getFieldList(FieldListContext ctx) { - return ctx.fieldExpression() - .stream() + return ctx.fieldExpression().stream() .map(field -> (Field) internalVisitExpression(field)) .collect(Collectors.toList()); } - /** - * Rare command. - */ + /** Rare command. */ @Override public UnresolvedPlan visitRareCommand(RareCommandContext ctx) { - List groupList = ctx.byClause() == null ? Collections.emptyList() : - getGroupByList(ctx.byClause()); + List groupList = + ctx.byClause() == null ? 
Collections.emptyList() : getGroupByList(ctx.byClause()); return new RareTopN( CommandType.RARE, ArgumentFactory.getArgumentList(ctx), getFieldList(ctx.fieldList()), - groupList - ); + groupList); } @Override @@ -319,34 +283,31 @@ public UnresolvedPlan visitPatternsCommand(OpenSearchPPLParser.PatternsCommandCo UnresolvedExpression sourceField = internalVisitExpression(ctx.source_field); ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.patternsParameter() - .forEach(x -> { - builder.put(x.children.get(0).toString(), - (Literal) internalVisitExpression(x.children.get(2))); - }); + .forEach( + x -> { + builder.put( + x.children.get(0).toString(), + (Literal) internalVisitExpression(x.children.get(2))); + }); java.util.Map arguments = builder.build(); Literal pattern = arguments.getOrDefault("pattern", AstDSL.stringLiteral("")); return new Parse(ParseMethod.PATTERNS, sourceField, pattern, arguments); } - /** - * Top command. - */ + /** Top command. */ @Override public UnresolvedPlan visitTopCommand(TopCommandContext ctx) { - List groupList = ctx.byClause() == null ? Collections.emptyList() : - getGroupByList(ctx.byClause()); + List groupList = + ctx.byClause() == null ? Collections.emptyList() : getGroupByList(ctx.byClause()); return new RareTopN( CommandType.TOP, ArgumentFactory.getArgumentList(ctx), getFieldList(ctx.fieldList()), - groupList - ); + groupList); } - /** - * From clause. - */ + /** From clause. */ @Override public UnresolvedPlan visitFromClause(FromClauseContext ctx) { if (ctx.tableFunction() != null) { @@ -358,34 +319,31 @@ public UnresolvedPlan visitFromClause(FromClauseContext ctx) { @Override public UnresolvedPlan visitTableSourceClause(TableSourceClauseContext ctx) { - return new Relation(ctx.tableSource() - .stream().map(this::internalVisitExpression) - .collect(Collectors.toList())); + return new Relation( + ctx.tableSource().stream().map(this::internalVisitExpression).collect(Collectors.toList())); } @Override public UnresolvedPlan visitTableFunction(TableFunctionContext ctx) { ImmutableList.Builder builder = ImmutableList.builder(); - ctx.functionArgs().functionArg().forEach(arg - -> { - String argName = (arg.ident() != null) ? arg.ident().getText() : null; - builder.add( - new UnresolvedArgument(argName, - this.internalVisitExpression(arg.valueExpression()))); - }); + ctx.functionArgs() + .functionArg() + .forEach( + arg -> { + String argName = (arg.ident() != null) ? arg.ident().getText() : null; + builder.add( + new UnresolvedArgument( + argName, this.internalVisitExpression(arg.valueExpression()))); + }); return new TableFunction(this.internalVisitExpression(ctx.qualifiedName()), builder.build()); } - /** - * Navigate to & build AST expression. - */ + /** Navigate to & build AST expression. */ private UnresolvedExpression internalVisitExpression(ParseTree tree) { return expressionBuilder.visit(tree); } - /** - * Simply return non-default value for now. - */ + /** Simply return non-default value for now. */ @Override protected UnresolvedPlan aggregateResult(UnresolvedPlan aggregate, UnresolvedPlan nextResult) { if (nextResult != defaultResult()) { @@ -394,52 +352,48 @@ protected UnresolvedPlan aggregateResult(UnresolvedPlan aggregate, UnresolvedPla return aggregate; } - /** - * Kmeans command. - */ + /** Kmeans command. 
*/ @Override public UnresolvedPlan visitKmeansCommand(KmeansCommandContext ctx) { ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.kmeansParameter() - .forEach(x -> { - builder.put(x.children.get(0).toString(), - (Literal) internalVisitExpression(x.children.get(2))); - }); + .forEach( + x -> { + builder.put( + x.children.get(0).toString(), + (Literal) internalVisitExpression(x.children.get(2))); + }); return new Kmeans(builder.build()); } - /** - * AD command. - */ + /** AD command. */ @Override public UnresolvedPlan visitAdCommand(AdCommandContext ctx) { ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.adParameter() - .forEach(x -> { - builder.put(x.children.get(0).toString(), - (Literal) internalVisitExpression(x.children.get(2))); - }); + .forEach( + x -> { + builder.put( + x.children.get(0).toString(), + (Literal) internalVisitExpression(x.children.get(2))); + }); return new AD(builder.build()); } - /** - * ml command. - */ + /** ml command. */ @Override public UnresolvedPlan visitMlCommand(OpenSearchPPLParser.MlCommandContext ctx) { ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.mlArg() - .forEach(x -> { - builder.put(x.argName.getText(), - (Literal) internalVisitExpression(x.argValue)); - }); + .forEach( + x -> { + builder.put(x.argName.getText(), (Literal) internalVisitExpression(x.argValue)); + }); return new ML(builder.build()); } - /** - * Get original text in query. - */ + /** Get original text in query. */ private String getTextInQuery(ParserRuleContext ctx) { Token start = ctx.getStart(); Token stop = ctx.getStop(); diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java index c775747ec4..690e45d67c 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static org.opensearch.sql.expression.function.BuiltinFunctionName.IS_NOT_NULL; @@ -83,33 +82,25 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParserBaseVisitor; import org.opensearch.sql.ppl.utils.ArgumentFactory; -/** - * Class of building AST Expression nodes. - */ +/** Class of building AST Expression nodes. */ public class AstExpressionBuilder extends OpenSearchPPLParserBaseVisitor { private static final int DEFAULT_TAKE_FUNCTION_SIZE_VALUE = 10; - /** - * The function name mapping between fronted and core engine. - */ + /** The function name mapping between fronted and core engine. */ private static Map FUNCTION_NAME_MAPPING = new ImmutableMap.Builder() .put("isnull", IS_NULL.getName().getFunctionName()) .put("isnotnull", IS_NOT_NULL.getName().getFunctionName()) .build(); - /** - * Eval clause. - */ + /** Eval clause. */ @Override public UnresolvedExpression visitEvalClause(EvalClauseContext ctx) { return new Let((Field) visit(ctx.fieldExpression()), visit(ctx.expression())); } - /** - * Logical expression excluding boolean, comparison. - */ + /** Logical expression excluding boolean, comparison. */ @Override public UnresolvedExpression visitLogicalNot(LogicalNotContext ctx) { return new Not(visit(ctx.logicalExpression())); @@ -130,9 +121,7 @@ public UnresolvedExpression visitLogicalXor(LogicalXorContext ctx) { return new Xor(visit(ctx.left), visit(ctx.right)); } - /** - * Comparison expression. - */ + /** Comparison expression. 
*/ @Override public UnresolvedExpression visitCompareExpr(CompareExprContext ctx) { return new Compare(ctx.comparisonOperator().getText(), visit(ctx.left), visit(ctx.right)); @@ -142,22 +131,16 @@ public UnresolvedExpression visitCompareExpr(CompareExprContext ctx) { public UnresolvedExpression visitInExpr(InExprContext ctx) { return new In( visit(ctx.valueExpression()), - ctx.valueList() - .literalValue() - .stream() + ctx.valueList().literalValue().stream() .map(this::visitLiteralValue) .collect(Collectors.toList())); } - /** - * Value Expression. - */ + /** Value Expression. */ @Override public UnresolvedExpression visitBinaryArithmetic(BinaryArithmeticContext ctx) { return new Function( - ctx.binaryOperator.getText(), - Arrays.asList(visit(ctx.left), visit(ctx.right)) - ); + ctx.binaryOperator.getText(), Arrays.asList(visit(ctx.left), visit(ctx.right))); } @Override @@ -165,9 +148,7 @@ public UnresolvedExpression visitParentheticValueExpr(ParentheticValueExprContex return visit(ctx.valueExpression()); // Discard parenthesis around } - /** - * Field expression. - */ + /** Field expression. */ @Override public UnresolvedExpression visitFieldExpression(FieldExpressionContext ctx) { return new Field((QualifiedName) visit(ctx.qualifiedName())); @@ -182,13 +163,10 @@ public UnresolvedExpression visitWcFieldExpression(WcFieldExpressionContext ctx) public UnresolvedExpression visitSortField(SortFieldContext ctx) { return new Field( visit(ctx.sortFieldExpression().fieldExpression().qualifiedName()), - ArgumentFactory.getArgumentList(ctx) - ); + ArgumentFactory.getArgumentList(ctx)); } - /** - * Aggregation function. - */ + /** Aggregation function. */ @Override public UnresolvedExpression visitStatsFunctionCall(StatsFunctionCallContext ctx) { return new AggregateFunction(ctx.statsFunctionName().getText(), visit(ctx.valueExpression())); @@ -206,7 +184,9 @@ public UnresolvedExpression visitDistinctCountFunctionCall(DistinctCountFunction @Override public UnresolvedExpression visitPercentileAggFunction(PercentileAggFunctionContext ctx) { - return new AggregateFunction(ctx.PERCENTILE().getText(), visit(ctx.aggField), + return new AggregateFunction( + ctx.PERCENTILE().getText(), + visit(ctx.aggField), Collections.singletonList(new Argument("rank", (Literal) visit(ctx.value)))); } @@ -214,34 +194,32 @@ public UnresolvedExpression visitPercentileAggFunction(PercentileAggFunctionCont public UnresolvedExpression visitTakeAggFunctionCall( OpenSearchPPLParser.TakeAggFunctionCallContext ctx) { ImmutableList.Builder builder = ImmutableList.builder(); - builder.add(new UnresolvedArgument("size", - ctx.takeAggFunction().size != null ? visit(ctx.takeAggFunction().size) : - AstDSL.intLiteral(DEFAULT_TAKE_FUNCTION_SIZE_VALUE))); - return new AggregateFunction("take", visit(ctx.takeAggFunction().fieldExpression()), - builder.build()); + builder.add( + new UnresolvedArgument( + "size", + ctx.takeAggFunction().size != null + ? visit(ctx.takeAggFunction().size) + : AstDSL.intLiteral(DEFAULT_TAKE_FUNCTION_SIZE_VALUE))); + return new AggregateFunction( + "take", visit(ctx.takeAggFunction().fieldExpression()), builder.build()); } - /** - * Eval function. - */ + /** Eval function. 
*/ @Override public UnresolvedExpression visitBooleanFunctionCall(BooleanFunctionCallContext ctx) { final String functionName = ctx.conditionFunctionBase().getText(); - return buildFunction(FUNCTION_NAME_MAPPING.getOrDefault(functionName, functionName), + return buildFunction( + FUNCTION_NAME_MAPPING.getOrDefault(functionName, functionName), ctx.functionArgs().functionArg()); } - /** - * Eval function. - */ + /** Eval function. */ @Override public UnresolvedExpression visitEvalFunctionCall(EvalFunctionCallContext ctx) { return buildFunction(ctx.evalFunctionName().getText(), ctx.functionArgs().functionArg()); } - /** - * Cast function. - */ + /** Cast function. */ @Override public UnresolvedExpression visitDataTypeFunctionCall(DataTypeFunctionCallContext ctx) { return new Cast(visit(ctx.expression()), visit(ctx.convertedDataType())); @@ -252,15 +230,10 @@ public UnresolvedExpression visitConvertedDataType(ConvertedDataTypeContext ctx) return AstDSL.stringLiteral(ctx.getText()); } - private Function buildFunction(String functionName, - List args) { + private Function buildFunction( + String functionName, List args) { return new Function( - functionName, - args - .stream() - .map(this::visitFunctionArg) - .collect(Collectors.toList()) - ); + functionName, args.stream().map(this::visitFunctionArg).collect(Collectors.toList())); } @Override @@ -290,70 +263,62 @@ public UnresolvedExpression visitTableSource(TableSourceContext ctx) { @Override public UnresolvedExpression visitPositionFunction( - OpenSearchPPLParser.PositionFunctionContext ctx) { + OpenSearchPPLParser.PositionFunctionContext ctx) { return new Function( - POSITION.getName().getFunctionName(), - Arrays.asList(visitFunctionArg(ctx.functionArg(0)), - visitFunctionArg(ctx.functionArg(1)))); + POSITION.getName().getFunctionName(), + Arrays.asList(visitFunctionArg(ctx.functionArg(0)), visitFunctionArg(ctx.functionArg(1)))); } @Override public UnresolvedExpression visitExtractFunctionCall( - OpenSearchPPLParser.ExtractFunctionCallContext ctx) { + OpenSearchPPLParser.ExtractFunctionCallContext ctx) { return new Function( - ctx.extractFunction().EXTRACT().toString(), - getExtractFunctionArguments(ctx)); + ctx.extractFunction().EXTRACT().toString(), getExtractFunctionArguments(ctx)); } private List getExtractFunctionArguments( - OpenSearchPPLParser.ExtractFunctionCallContext ctx) { - List args = Arrays.asList( + OpenSearchPPLParser.ExtractFunctionCallContext ctx) { + List args = + Arrays.asList( new Literal(ctx.extractFunction().datetimePart().getText(), DataType.STRING), - visitFunctionArg(ctx.extractFunction().functionArg()) - ); + visitFunctionArg(ctx.extractFunction().functionArg())); return args; } @Override public UnresolvedExpression visitGetFormatFunctionCall( - OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { + OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { return new Function( - ctx.getFormatFunction().GET_FORMAT().toString(), - getFormatFunctionArguments(ctx)); + ctx.getFormatFunction().GET_FORMAT().toString(), getFormatFunctionArguments(ctx)); } private List getFormatFunctionArguments( - OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { - List args = Arrays.asList( + OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { + List args = + Arrays.asList( new Literal(ctx.getFormatFunction().getFormatType().getText(), DataType.STRING), - visitFunctionArg(ctx.getFormatFunction().functionArg()) - ); + visitFunctionArg(ctx.getFormatFunction().functionArg())); return args; } @Override public UnresolvedExpression 
visitTimestampFunctionCall( - OpenSearchPPLParser.TimestampFunctionCallContext ctx) { + OpenSearchPPLParser.TimestampFunctionCallContext ctx) { return new Function( - ctx.timestampFunction().timestampFunctionName().getText(), - timestampFunctionArguments(ctx)); + ctx.timestampFunction().timestampFunctionName().getText(), timestampFunctionArguments(ctx)); } private List timestampFunctionArguments( - OpenSearchPPLParser.TimestampFunctionCallContext ctx) { - List args = Arrays.asList( - new Literal( - ctx.timestampFunction().simpleDateTimePart().getText(), - DataType.STRING), + OpenSearchPPLParser.TimestampFunctionCallContext ctx) { + List args = + Arrays.asList( + new Literal(ctx.timestampFunction().simpleDateTimePart().getText(), DataType.STRING), visitFunctionArg(ctx.timestampFunction().firstArg), - visitFunctionArg(ctx.timestampFunction().secondArg) - ); + visitFunctionArg(ctx.timestampFunction().secondArg)); return args; } - /** - * Literal and value. - */ + /** Literal and value. */ @Override public UnresolvedExpression visitIdentsAsQualifiedName(IdentsAsQualifiedNameContext ctx) { return visitIdentifiers(ctx.ident()); @@ -406,8 +371,10 @@ public UnresolvedExpression visitBooleanLiteral(BooleanLiteralContext ctx) { @Override public UnresolvedExpression visitBySpanClause(BySpanClauseContext ctx) { String name = ctx.spanClause().getText(); - return ctx.alias != null ? new Alias(name, visit(ctx.spanClause()), StringUtils - .unquoteIdentifier(ctx.alias.getText())) : new Alias(name, visit(ctx.spanClause())); + return ctx.alias != null + ? new Alias( + name, visit(ctx.spanClause()), StringUtils.unquoteIdentifier(ctx.alias.getText())) + : new Alias(name, visit(ctx.spanClause())); } @Override @@ -421,8 +388,7 @@ private QualifiedName visitIdentifiers(List ctx) { ctx.stream() .map(RuleContext::getText) .map(StringUtils::unquoteIdentifier) - .collect(Collectors.toList()) - ); + .collect(Collectors.toList())); } private List singleFieldRelevanceArguments( @@ -430,13 +396,21 @@ private List singleFieldRelevanceArguments( // all the arguments are defaulted to string values // to skip environment resolving and function signature resolving ImmutableList.Builder builder = ImmutableList.builder(); - builder.add(new UnresolvedArgument("field", - new QualifiedName(StringUtils.unquoteText(ctx.field.getText())))); - builder.add(new UnresolvedArgument("query", - new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); - ctx.relevanceArg().forEach(v -> builder.add(new UnresolvedArgument( - v.relevanceArgName().getText().toLowerCase(), new Literal(StringUtils.unquoteText( - v.relevanceArgValue().getText()), DataType.STRING)))); + builder.add( + new UnresolvedArgument( + "field", new QualifiedName(StringUtils.unquoteText(ctx.field.getText())))); + builder.add( + new UnresolvedArgument( + "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); + ctx.relevanceArg() + .forEach( + v -> + builder.add( + new UnresolvedArgument( + v.relevanceArgName().getText().toLowerCase(), + new Literal( + StringUtils.unquoteText(v.relevanceArgValue().getText()), + DataType.STRING)))); return builder.build(); } @@ -445,19 +419,26 @@ private List multiFieldRelevanceArguments( // all the arguments are defaulted to string values // to skip environment resolving and function signature resolving ImmutableList.Builder builder = ImmutableList.builder(); - var fields = new RelevanceFieldList(ctx - .getRuleContexts(OpenSearchPPLParser.RelevanceFieldAndWeightContext.class) - .stream() - 
.collect(Collectors.toMap( - f -> StringUtils.unquoteText(f.field.getText()), - f -> (f.weight == null) ? 1F : Float.parseFloat(f.weight.getText())))); + var fields = + new RelevanceFieldList( + ctx.getRuleContexts(OpenSearchPPLParser.RelevanceFieldAndWeightContext.class).stream() + .collect( + Collectors.toMap( + f -> StringUtils.unquoteText(f.field.getText()), + f -> (f.weight == null) ? 1F : Float.parseFloat(f.weight.getText())))); builder.add(new UnresolvedArgument("fields", fields)); - builder.add(new UnresolvedArgument("query", - new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); - ctx.relevanceArg().forEach(v -> builder.add(new UnresolvedArgument( - v.relevanceArgName().getText().toLowerCase(), new Literal(StringUtils.unquoteText( - v.relevanceArgValue().getText()), DataType.STRING)))); + builder.add( + new UnresolvedArgument( + "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); + ctx.relevanceArg() + .forEach( + v -> + builder.add( + new UnresolvedArgument( + v.relevanceArgName().getText().toLowerCase(), + new Literal( + StringUtils.unquoteText(v.relevanceArgValue().getText()), + DataType.STRING)))); return builder.build(); } - } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java index 3b7e5a78dd..e276e6d523 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java @@ -21,9 +21,7 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser; import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParserBaseVisitor; -/** - * Build {@link Statement} from PPL Query. - */ +/** Build {@link Statement} from PPL Query. */ @RequiredArgsConstructor public class AstStatementBuilder extends OpenSearchPPLParserBaseVisitor { diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java index 941bfe680e..f89ecf9c6e 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.BooleanLiteralContext; @@ -24,9 +23,7 @@ import org.opensearch.sql.ast.expression.Literal; import org.opensearch.sql.common.utils.StringUtils; -/** - * Util class to get all arguments as a list from the PPL command. - */ +/** Util class to get all arguments as a list from the PPL command. */ public class ArgumentFactory { /** @@ -39,8 +36,7 @@ public static List getArgumentList(FieldsCommandContext ctx) { return Collections.singletonList( ctx.MINUS() != null ? new Argument("exclude", new Literal(true, DataType.BOOLEAN)) - : new Argument("exclude", new Literal(false, DataType.BOOLEAN)) - ); + : new Argument("exclude", new Literal(false, DataType.BOOLEAN))); } /** @@ -62,8 +58,7 @@ public static List getArgumentList(StatsCommandContext ctx) { : new Argument("delim", new Literal(" ", DataType.STRING)), ctx.dedupsplit != null ? 
new Argument("dedupsplit", getArgumentValue(ctx.dedupsplit)) - : new Argument("dedupsplit", new Literal(false, DataType.BOOLEAN)) - ); + : new Argument("dedupsplit", new Literal(false, DataType.BOOLEAN))); } /** @@ -82,8 +77,7 @@ public static List getArgumentList(DedupCommandContext ctx) { : new Argument("keepempty", new Literal(false, DataType.BOOLEAN)), ctx.consecutive != null ? new Argument("consecutive", getArgumentValue(ctx.consecutive)) - : new Argument("consecutive", new Literal(false, DataType.BOOLEAN)) - ); + : new Argument("consecutive", new Literal(false, DataType.BOOLEAN))); } /** @@ -100,13 +94,12 @@ public static List getArgumentList(SortFieldContext ctx) { ctx.sortFieldExpression().AUTO() != null ? new Argument("type", new Literal("auto", DataType.STRING)) : ctx.sortFieldExpression().IP() != null - ? new Argument("type", new Literal("ip", DataType.STRING)) - : ctx.sortFieldExpression().NUM() != null - ? new Argument("type", new Literal("num", DataType.STRING)) - : ctx.sortFieldExpression().STR() != null - ? new Argument("type", new Literal("str", DataType.STRING)) - : new Argument("type", new Literal(null, DataType.NULL)) - ); + ? new Argument("type", new Literal("ip", DataType.STRING)) + : ctx.sortFieldExpression().NUM() != null + ? new Argument("type", new Literal("num", DataType.STRING)) + : ctx.sortFieldExpression().STR() != null + ? new Argument("type", new Literal("str", DataType.STRING)) + : new Argument("type", new Literal(null, DataType.NULL))); } /** @@ -119,8 +112,7 @@ public static List getArgumentList(TopCommandContext ctx) { return Collections.singletonList( ctx.number != null ? new Argument("noOfResults", getArgumentValue(ctx.number)) - : new Argument("noOfResults", new Literal(10, DataType.INTEGER)) - ); + : new Argument("noOfResults", new Literal(10, DataType.INTEGER))); } /** @@ -130,21 +122,21 @@ public static List getArgumentList(TopCommandContext ctx) { * @return the list of argument with default number of results for the rare command */ public static List getArgumentList(RareCommandContext ctx) { - return Collections - .singletonList(new Argument("noOfResults", new Literal(10, DataType.INTEGER))); + return Collections.singletonList( + new Argument("noOfResults", new Literal(10, DataType.INTEGER))); } /** * parse argument value into Literal. + * * @param ctx ParserRuleContext instance * @return Literal */ private static Literal getArgumentValue(ParserRuleContext ctx) { return ctx instanceof IntegerLiteralContext - ? new Literal(Integer.parseInt(ctx.getText()), DataType.INTEGER) - : ctx instanceof BooleanLiteralContext + ? new Literal(Integer.parseInt(ctx.getText()), DataType.INTEGER) + : ctx instanceof BooleanLiteralContext ? 
new Literal(Boolean.valueOf(ctx.getText()), DataType.BOOLEAN) : new Literal(StringUtils.unquoteText(ctx.getText()), DataType.STRING); } - } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java index 1f0e6f0d52..d28e5d122b 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import com.google.common.base.Strings; @@ -54,9 +53,7 @@ import org.opensearch.sql.planner.logical.LogicalRename; import org.opensearch.sql.planner.logical.LogicalSort; -/** - * Utility class to mask sensitive information in incoming PPL queries. - */ +/** Utility class to mask sensitive information in incoming PPL queries. */ public class PPLQueryDataAnonymizer extends AbstractNodeVisitor { private static final String MASK_LITERAL = "***"; @@ -68,8 +65,8 @@ public PPLQueryDataAnonymizer() { } /** - * This method is used to anonymize sensitive data in PPL query. - * Sensitive data includes user data., + * This method is used to anonymize sensitive data in PPL query. Sensitive data includes user + * data. * * @return ppl query string with all user data replace with "***" */ @@ -81,9 +78,7 @@ public String anonymizeStatement(Statement plan) { return plan.accept(this, null); } - /** - * Handle Query Statement. - */ + /** Handle Query Statement. */ @Override public String visitQuery(Query node, String context) { return node.getPlan().accept(this, null); @@ -103,8 +98,9 @@ public String visitRelation(Relation node, String context) { public String visitTableFunction(TableFunction node, String context) { String arguments = node.getArguments().stream() - .map(unresolvedExpression - -> this.expressionAnalyzer.analyze(unresolvedExpression, context)) + .map( + unresolvedExpression -> + this.expressionAnalyzer.analyze(unresolvedExpression, context)) .collect(Collectors.joining(",")); return StringUtils.format("source=%s(%s)", node.getFunctionName().toString(), arguments); } @@ -116,37 +112,34 @@ public String visitFilter(Filter node, String context) { return StringUtils.format("%s | where %s", child, condition); } - /** - * Build {@link LogicalRename}. - */ + /** Build {@link LogicalRename}. */ @Override public String visitRename(Rename node, String context) { String child = node.getChild().get(0).accept(this, context); ImmutableMap.Builder renameMapBuilder = new ImmutableMap.Builder<>(); for (Map renameMap : node.getRenameList()) { - renameMapBuilder.put(visitExpression(renameMap.getOrigin()), + renameMapBuilder.put( + visitExpression(renameMap.getOrigin()), ((Field) renameMap.getTarget()).getField().toString()); } String renames = - renameMapBuilder.build().entrySet().stream().map(entry -> StringUtils.format("%s as %s", - entry.getKey(), entry.getValue())).collect(Collectors.joining(",")); + renameMapBuilder.build().entrySet().stream() + .map(entry -> StringUtils.format("%s as %s", entry.getKey(), entry.getValue())) + .collect(Collectors.joining(",")); return StringUtils.format("%s | rename %s", child, renames); } - /** - * Build {@link LogicalAggregation}. - */ + /** Build {@link LogicalAggregation}. 
*/ @Override public String visitAggregation(Aggregation node, String context) { String child = node.getChild().get(0).accept(this, context); final String group = visitExpressionList(node.getGroupExprList()); - return StringUtils.format("%s | stats %s", child, - String.join(" ", visitExpressionList(node.getAggExprList()), groupBy(group)).trim()); + return StringUtils.format( + "%s | stats %s", + child, String.join(" ", visitExpressionList(node.getAggExprList()), groupBy(group)).trim()); } - /** - * Build {@link LogicalRareTopN}. - */ + /** Build {@link LogicalRareTopN}. */ @Override public String visitRareTopN(RareTopN node, String context) { final String child = node.getChild().get(0).accept(this, context); @@ -154,16 +147,15 @@ public String visitRareTopN(RareTopN node, String context) { Integer noOfResults = (Integer) options.get(0).getValue().getValue(); String fields = visitFieldList(node.getFields()); String group = visitExpressionList(node.getGroupExprList()); - return StringUtils.format("%s | %s %d %s", child, + return StringUtils.format( + "%s | %s %d %s", + child, node.getCommandType().name().toLowerCase(), noOfResults, - String.join(" ", fields, groupBy(group)).trim() - ); + String.join(" ", fields, groupBy(group)).trim()); } - /** - * Build {@link LogicalProject} or {@link LogicalRemove} from {@link Field}. - */ + /** Build {@link LogicalProject} or {@link LogicalRemove} from {@link Field}. */ @Override public String visitProject(Project node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -180,9 +172,7 @@ public String visitProject(Project node, String context) { return StringUtils.format("%s | fields %s %s", child, arg, fields); } - /** - * Build {@link LogicalEval}. - */ + /** Build {@link LogicalEval}. */ @Override public String visitEval(Eval node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -192,14 +182,14 @@ public String visitEval(Eval node, String context) { String target = let.getVar().getField().toString(); expressionsBuilder.add(ImmutablePair.of(target, expression)); } - String expressions = expressionsBuilder.build().stream().map(pair -> StringUtils.format("%s" - + "=%s", pair.getLeft(), pair.getRight())).collect(Collectors.joining(" ")); + String expressions = + expressionsBuilder.build().stream() + .map(pair -> StringUtils.format("%s" + "=%s", pair.getLeft(), pair.getRight())) + .collect(Collectors.joining(" ")); return StringUtils.format("%s | eval %s", child, expressions); } - /** - * Build {@link LogicalSort}. - */ + /** Build {@link LogicalSort}. */ @Override public String visitSort(Sort node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -208,9 +198,7 @@ public String visitSort(Sort node, String context) { return StringUtils.format("%s | sort %s", child, sortList); } - /** - * Build {@link LogicalDedupe}. - */ + /** Build {@link LogicalDedupe}. 
*/ @Override public String visitDedupe(Dedupe node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -220,10 +208,9 @@ public String visitDedupe(Dedupe node, String context) { Boolean keepEmpty = (Boolean) options.get(1).getValue().getValue(); Boolean consecutive = (Boolean) options.get(2).getValue().getValue(); - return StringUtils - .format("%s | dedup %s %d keepempty=%b consecutive=%b", child, fields, allowedDuplication, - keepEmpty, - consecutive); + return StringUtils.format( + "%s | dedup %s %d keepempty=%b consecutive=%b", + child, fields, allowedDuplication, keepEmpty, consecutive); } @Override @@ -238,8 +225,9 @@ private String visitFieldList(List fieldList) { } private String visitExpressionList(List expressionList) { - return expressionList.isEmpty() ? "" : - expressionList.stream().map(this::visitExpression).collect(Collectors.joining(",")); + return expressionList.isEmpty() + ? "" + : expressionList.stream().map(this::visitExpression).collect(Collectors.joining(",")); } private String visitExpression(UnresolvedExpression expression) { @@ -250,11 +238,8 @@ private String groupBy(String groupBy) { return Strings.isNullOrEmpty(groupBy) ? "" : StringUtils.format("by %s", groupBy); } - /** - * Expression Anonymizer. - */ - private static class AnonymizerExpressionAnalyzer extends AbstractNodeVisitor { + /** Expression Anonymizer. */ + private static class AnonymizerExpressionAnalyzer extends AbstractNodeVisitor { public String analyze(UnresolvedExpression unresolved, String context) { return unresolved.accept(this, context); diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java index 4fb9eee6a0..a502f2d769 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import com.google.common.collect.ImmutableList; @@ -12,15 +11,11 @@ import org.opensearch.sql.ast.tree.Project; import org.opensearch.sql.ast.tree.UnresolvedPlan; -/** - * The helper to add select to {@link UnresolvedPlan} if needed. - */ +/** The helper to add select to {@link UnresolvedPlan} if needed. */ @UtilityClass public class UnresolvedPlanHelper { - /** - * Attach Select All to PPL commands if required. - */ + /** Attach Select All to PPL commands if required. */ public UnresolvedPlan addSelectAll(UnresolvedPlan plan) { if ((plan instanceof Project) && !((Project) plan).isExcluded()) { return plan; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java index c14eb3dba1..598f6691cb 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.mockito.ArgumentMatchers.any; @@ -41,21 +40,17 @@ public class PPLServiceTest { private DefaultQueryManager queryManager; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; - @Mock - private ExecutionEngine.Schema schema; + @Mock private ExecutionEngine.Schema schema; - /** - * Setup the test context. - */ + /** Setup the test context. 
*/ @Before public void setUp() { queryManager = DefaultQueryManager.defaultQueryManager(); - pplService = new PPLService(new PPLSyntaxParser(), queryManager, - new QueryPlanFactory(queryService)); + pplService = + new PPLService(new PPLSyntaxParser(), queryManager, new QueryPlanFactory(queryService)); } @After @@ -65,18 +60,20 @@ public void cleanup() throws InterruptedException { @Test public void testExecuteShouldPass() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); - return null; - }).when(queryService).execute(any(), any()); - - pplService.execute(new PPLQueryRequest("search source=t a=1", null, QUERY), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); + return null; + }) + .when(queryService) + .execute(any(), any()); + + pplService.execute( + new PPLQueryRequest("search source=t a=1", null, QUERY), new ResponseListener() { @Override - public void onResponse(QueryResponse pplQueryResponse) { - - } + public void onResponse(QueryResponse pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -87,17 +84,20 @@ public void onFailure(Exception e) { @Test public void testExecuteCsvFormatShouldPass() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); - return null; - }).when(queryService).execute(any(), any()); - - pplService.execute(new PPLQueryRequest("search source=t a=1", null, QUERY, "csv"), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); + return null; + }) + .when(queryService) + .execute(any(), any()); + + pplService.execute( + new PPLQueryRequest("search source=t a=1", null, QUERY, "csv"), new ResponseListener() { @Override - public void onResponse(QueryResponse pplQueryResponse) { - } + public void onResponse(QueryResponse pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -108,17 +108,20 @@ public void onFailure(Exception e) { @Test public void testExplainShouldPass() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new ExplainResponse(new ExplainResponseNode("test"))); - return null; - }).when(queryService).explain(any(), any()); - - pplService.explain(new PPLQueryRequest("search source=t a=1", null, EXPLAIN), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new ExplainResponse(new ExplainResponseNode("test"))); + return null; + }) + .when(queryService) + .explain(any(), any()); + + pplService.explain( + new PPLQueryRequest("search source=t a=1", null, EXPLAIN), new ResponseListener() { @Override - public void onResponse(ExplainResponse pplQueryResponse) { - } + public void onResponse(ExplainResponse pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -129,7 +132,8 @@ public void onFailure(Exception e) { @Test public void testExecuteWithIllegalQueryShouldBeCaughtByHandler() { - pplService.execute(new PPLQueryRequest("search", null, QUERY), + pplService.execute( + new PPLQueryRequest("search", null, QUERY), new ResponseListener() { @Override public void onResponse(QueryResponse pplQueryResponse) { @@ -137,15 +141,14 @@ 
public void onResponse(QueryResponse pplQueryResponse) { } @Override - public void onFailure(Exception e) { - - } + public void onFailure(Exception e) {} }); } @Test public void testExplainWithIllegalQueryShouldBeCaughtByHandler() { - pplService.explain(new PPLQueryRequest("search", null, QUERY), + pplService.explain( + new PPLQueryRequest("search", null, QUERY), new ResponseListener<>() { @Override public void onResponse(ExplainResponse pplQueryResponse) { @@ -153,26 +156,26 @@ public void onResponse(ExplainResponse pplQueryResponse) { } @Override - public void onFailure(Exception e) { - - } + public void onFailure(Exception e) {} }); } @Test public void testPrometheusQuery() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); - return null; - }).when(queryService).execute(any(), any()); - - pplService.execute(new PPLQueryRequest("source = prometheus.http_requests_total", null, QUERY), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); + return null; + }) + .when(queryService) + .execute(any(), any()); + + pplService.execute( + new PPLQueryRequest("source = prometheus.http_requests_total", null, QUERY), new ResponseListener<>() { @Override - public void onResponse(QueryResponse pplQueryResponse) { - - } + public void onResponse(QueryResponse pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -183,7 +186,8 @@ public void onFailure(Exception e) { @Test public void testInvalidPPLQuery() { - pplService.execute(new PPLQueryRequest("search", null, QUERY), + pplService.execute( + new PPLQueryRequest("search", null, QUERY), new ResponseListener() { @Override public void onResponse(QueryResponse pplQueryResponse) { @@ -191,9 +195,7 @@ public void onResponse(QueryResponse pplQueryResponse) { } @Override - public void onFailure(Exception e) { - - } + public void onFailure(Exception e) {} }); } } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java index 9f635fdd81..f6a04983e2 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.antlr; import static org.junit.Assert.assertNotEquals; @@ -21,6 +20,7 @@ public class NowLikeFunctionParserTest { /** * Set parameterized values used in test. + * * @param name Function name * @param hasFsp Whether function has fsp argument * @param hasShortcut Whether function has shortcut (call without `()`) @@ -33,24 +33,26 @@ public NowLikeFunctionParserTest(String name, Boolean hasFsp, Boolean hasShortcu /** * Returns function data to test. + * * @return An iterable. 
*/ @Parameterized.Parameters(name = "{0}") public static Iterable functionNames() { - return List.of(new Object[][]{ - {"now", true, false}, - {"current_timestamp", true, true}, - {"localtimestamp", true, true}, - {"localtime", true, true}, - {"sysdate", true, false}, - {"curtime", true, false}, - {"current_time", true, true}, - {"curdate", false, false}, - {"current_date", false, true}, - {"utc_date", false, false}, - {"utc_time", false, false}, - {"utc_timestamp", false, false} - }); + return List.of( + new Object[][] { + {"now", true, false}, + {"current_timestamp", true, true}, + {"localtimestamp", true, true}, + {"localtime", true, true}, + {"sysdate", true, false}, + {"curtime", true, false}, + {"current_time", true, true}, + {"curdate", false, false}, + {"current_date", false, true}, + {"utc_date", false, false}, + {"utc_time", false, false}, + {"utc_timestamp", false, false} + }); } private final String name; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java index dd146ea2cf..7de197028e 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java @@ -13,26 +13,24 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; - @RunWith(Parameterized.class) public class PPLSyntaxParserMatchBoolPrefixSamplesTests { - - /** Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + /** + * Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + * * @return an Iterable of sample queries. 
*/ @Parameterized.Parameters(name = "{0}") public static Iterable sampleQueries() { return List.of( "source=t a= 1 | where match_bool_prefix(a, 'hello world')", - "source=t a = 1 | where match_bool_prefix(a, 'hello world'," - + " minimum_should_match = 3)", + "source=t a = 1 | where match_bool_prefix(a, 'hello world'," + " minimum_should_match = 3)", "source=t a = 1 | where match_bool_prefix(a, 'hello world', fuzziness='AUTO')", "source=t a = 1 | where match_bool_prefix(a, 'hello world', fuzziness='AUTO:4,6')", "source=t a= 1 | where match_bool_prefix(a, 'hello world', prefix_length=0)", "source=t a= 1 | where match_bool_prefix(a, 'hello world', max_expansions=1)", - "source=t a= 1 | where match_bool_prefix(a, 'hello world'," - + " fuzzy_transpositions=true)", + "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_transpositions=true)", "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_rewrite=constant_score)", "source=t a= 1 | where match_bool_prefix(a, 'hello world'," @@ -43,8 +41,7 @@ public static Iterable sampleQueries() { + " fuzzy_rewrite=top_terms_blended_freqs_1)", "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_rewrite=top_terms_boost_1)", - "source=t a= 1 | where match_bool_prefix(a, 'hello world'," - + " fuzzy_rewrite=top_terms_1)", + "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_rewrite=top_terms_1)", "source=t a= 1 | where match_bool_prefix(a, 'hello world', boost=1)", "source=t a = 1 | where match_bool_prefix(a, 'hello world', analyzer = 'standard'," + "prefix_length = '0', boost = 1)"); diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java index aef6d1d69e..94222ec103 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java @@ -13,22 +13,22 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; - @RunWith(Parameterized.class) public class PPLSyntaxParserMatchPhraseSamplesTest { - - /** Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + /** + * Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + * * @return an Iterable of sample queries. 
*/ @Parameterized.Parameters(name = "{0}") public static Iterable sampleQueries() { return List.of( - "source=t a= 1 | where match_phrase(a, 'hello world')", - "source=t a = 1 | where match_phrase(a, 'hello world', slop = 3)", - "source=t a = 1 | where match_phrase(a, 'hello world', analyzer = 'standard'," - + "zero_terms_query = 'none', slop = 3)", - "source=t a = 1 | where match_phrase(a, 'hello world', zero_terms_query = all)"); + "source=t a= 1 | where match_phrase(a, 'hello world')", + "source=t a = 1 | where match_phrase(a, 'hello world', slop = 3)", + "source=t a = 1 | where match_phrase(a, 'hello world', analyzer = 'standard'," + + "zero_terms_query = 'none', slop = 3)", + "source=t a = 1 | where match_phrase(a, 'hello world', zero_terms_query = all)"); } private final String query; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java index 57cee7fa1d..943953d416 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.antlr; import static org.junit.Assert.assertNotEquals; @@ -19,8 +18,7 @@ public class PPLSyntaxParserTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Test public void testSearchCommandShouldPass() { @@ -140,99 +138,170 @@ public void testTopCommandWithoutNAndGroupByShouldPass() { @Test public void testCanParseMultiMatchRelevanceFunction() { - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address', 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"*\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"address\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([`address`], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([address], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address', 'notes' ^ 1.5], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address', 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," - + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match(['address'], 'query')")); + assertNotEquals( + null, + new 
PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address', 'notes'], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([\"*\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([\"address\"], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([`address`], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([address], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address', 'notes' ^ 1.5], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address', 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," + + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); } @Test public void testCanParseSimpleQueryStringRelevanceFunction() { - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address', 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"*\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"address\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([`address`], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([address], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address', 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," - + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE 
simple_query_string(['address'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address', 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE simple_query_string([\"*\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string([\"address\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string([`address`], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE simple_query_string([address], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2]," + + " 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address', 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2]," + + " 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2]," + + " 'query',analyzer=keyword, quote_field_suffix=\".exact\"," + + " fuzzy_prefix_length = 4)")); } @Test public void testCanParseQueryStringRelevanceFunction() { - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address', 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"*\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"address\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([`address`], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([address], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address' ^ 1.1, 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address', 'notes' ^ 1.5], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address', 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address' ^ .3, 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," - + "analyzer=keyword, quote_field_suffix=\".exact\", 
fuzzy_prefix_length = 4)")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string(['address'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address', 'notes'], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([\"*\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([\"address\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([`address`], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([address], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address' ^ 1.1, 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address', 'notes' ^ 1.5], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address', 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address' ^ .3, 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," + + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); } @Test @@ -275,15 +344,35 @@ public void testDescribeCommandWithSourceShouldFail() { @Test public void testCanParseExtractFunction() { - String[] parts = List.of("MICROSECOND", "SECOND", "MINUTE", "HOUR", "DAY", - "WEEK", "MONTH", "QUARTER", "YEAR", "SECOND_MICROSECOND", - "MINUTE_MICROSECOND", "MINUTE_SECOND", "HOUR_MICROSECOND", - "HOUR_SECOND", "HOUR_MINUTE", "DAY_MICROSECOND", - "DAY_SECOND", "DAY_MINUTE", "DAY_HOUR", "YEAR_MONTH").toArray(new String[0]); + String[] parts = + List.of( + "MICROSECOND", + "SECOND", + "MINUTE", + "HOUR", + "DAY", + "WEEK", + "MONTH", + "QUARTER", + "YEAR", + "SECOND_MICROSECOND", + "MINUTE_MICROSECOND", + "MINUTE_SECOND", + "HOUR_MICROSECOND", + "HOUR_SECOND", + "HOUR_MINUTE", + "DAY_MICROSECOND", + "DAY_SECOND", + "DAY_MINUTE", + "DAY_HOUR", + "YEAR_MONTH") + .toArray(new String[0]); for (String part : parts) { - assertNotNull(new PPLSyntaxParser().parse( - String.format("SOURCE=test | eval k = extract(%s FROM \"2023-02-06\")", part))); + assertNotNull( + new PPLSyntaxParser() + .parse( + String.format("SOURCE=test | eval k = extract(%s FROM \"2023-02-06\")", part))); } } @@ -294,8 +383,9 @@ public void testCanParseGetFormatFunction() { for (String type : types) { for (String format : formats) { - assertNotNull(new PPLSyntaxParser().parse( - String.format("SOURCE=test | eval k = get_format(%s, %s)", type, format))); + assertNotNull( + new PPLSyntaxParser() + .parse(String.format("SOURCE=test | eval k = get_format(%s, %s)", type, format))); } } } @@ -303,24 +393,28 @@ public void testCanParseGetFormatFunction() { @Test public void testCannotParseGetFormatFunctionWithBadArg() { assertThrows( - SyntaxCheckException.class, - () -> new PPLSyntaxParser().parse( - "SOURCE=test | eval k = 
GET_FORMAT(NONSENSE_ARG,'INTERNAL')")); + SyntaxCheckException.class, + () -> + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = GET_FORMAT(NONSENSE_ARG,'INTERNAL')")); } @Test public void testCanParseTimestampaddFunction() { - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPADD(MINUTE, 1, '2003-01-02')")); - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPADD(WEEK,1,'2003-01-02')")); + assertNotNull( + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = TIMESTAMPADD(MINUTE, 1, '2003-01-02')")); + assertNotNull( + new PPLSyntaxParser().parse("SOURCE=test | eval k = TIMESTAMPADD(WEEK,1,'2003-01-02')")); } @Test public void testCanParseTimestampdiffFunction() { - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPDIFF(MINUTE, '2003-01-02', '2003-01-02')")); - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPDIFF(WEEK,'2003-01-02','2003-01-02')")); + assertNotNull( + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = TIMESTAMPDIFF(MINUTE, '2003-01-02', '2003-01-02')")); + assertNotNull( + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = TIMESTAMPDIFF(WEEK,'2003-01-02','2003-01-02')")); } } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java index b53656e252..29e6ff3298 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; import static org.junit.Assert.assertEquals; @@ -16,8 +15,7 @@ public class PPLQueryRequestTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Test public void getRequestShouldPass() { @@ -27,40 +25,34 @@ public void getRequestShouldPass() { @Test public void testExplainRequest() { - PPLQueryRequest request = new PPLQueryRequest( - "source=t a=1", null, "/_plugins/_ppl/_explain"); + PPLQueryRequest request = new PPLQueryRequest("source=t a=1", null, "/_plugins/_ppl/_explain"); assertTrue(request.isExplainRequest()); } @Test public void testDefaultFormat() { - PPLQueryRequest request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl"); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl"); assertEquals(request.format(), Format.JDBC); } @Test public void testJDBCFormat() { - PPLQueryRequest request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl", "jdbc"); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl", "jdbc"); assertEquals(request.format(), Format.JDBC); } @Test public void testCSVFormat() { - PPLQueryRequest request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl", "csv"); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl", "csv"); assertEquals(request.format(), Format.CSV); } @Test public void testUnsupportedFormat() { String format = "notsupport"; - PPLQueryRequest request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl", format); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl", format); exceptionRule.expect(IllegalArgumentException.class); exceptionRule.expectMessage("response in " + format + " format is not supported."); 
request.format(); } - } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java index 03eaaf22f4..50be4efa2e 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; import org.junit.Test; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java index 599f6bdd75..c9989a49c4 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static java.util.Collections.emptyList; @@ -62,353 +61,264 @@ public class AstBuilderTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); private PPLSyntaxParser parser = new PPLSyntaxParser(); @Test public void testSearchCommand() { - assertEqual("search source=t a=1", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - ) - ); + assertEqual( + "search source=t a=1", filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testSearchCrossClusterCommand() { - assertEqual("search source=c:t", - relation(qualifiedName("c:t")) - ); + assertEqual("search source=c:t", relation(qualifiedName("c:t"))); } @Test public void testSearchMatchAllCrossClusterCommand() { - assertEqual("search source=*:t", - relation(qualifiedName("*:t")) - ); + assertEqual("search source=*:t", relation(qualifiedName("*:t"))); } @Test public void testPrometheusSearchCommand() { - assertEqual("search source = prometheus.http_requests_total", - relation(qualifiedName("prometheus", "http_requests_total")) - ); + assertEqual( + "search source = prometheus.http_requests_total", + relation(qualifiedName("prometheus", "http_requests_total"))); } @Test public void testSearchCommandWithDataSourceEscape() { - assertEqual("search source = `prometheus.http_requests_total`", - relation("prometheus.http_requests_total") - ); + assertEqual( + "search source = `prometheus.http_requests_total`", + relation("prometheus.http_requests_total")); } @Test public void testSearchCommandWithDotInIndexName() { - assertEqual("search source = http_requests_total.test", - relation(qualifiedName("http_requests_total","test")) - ); + assertEqual( + "search source = http_requests_total.test", + relation(qualifiedName("http_requests_total", "test"))); } @Test public void testSearchWithPrometheusQueryRangeWithPositionedArguments() { - assertEqual("search source = prometheus.query_range(\"test{code='200'}\",1234, 12345, 3)", - tableFunction(Arrays.asList("prometheus", "query_range"), + assertEqual( + "search source = prometheus.query_range(\"test{code='200'}\",1234, 12345, 3)", + tableFunction( + Arrays.asList("prometheus", "query_range"), unresolvedArg(null, stringLiteral("test{code='200'}")), unresolvedArg(null, intLiteral(1234)), unresolvedArg(null, intLiteral(12345)), - unresolvedArg(null, intLiteral(3)) - )); + unresolvedArg(null, intLiteral(3)))); } @Test public void testSearchWithPrometheusQueryRangeWithNamedArguments() { - assertEqual("search source = 
prometheus.query_range(query = \"test{code='200'}\", " + assertEqual( + "search source = prometheus.query_range(query = \"test{code='200'}\", " + "starttime = 1234, step=3, endtime=12345)", - tableFunction(Arrays.asList("prometheus", "query_range"), + tableFunction( + Arrays.asList("prometheus", "query_range"), unresolvedArg("query", stringLiteral("test{code='200'}")), unresolvedArg("starttime", intLiteral(1234)), unresolvedArg("step", intLiteral(3)), - unresolvedArg("endtime", intLiteral(12345)) - )); + unresolvedArg("endtime", intLiteral(12345)))); } @Test public void testSearchCommandString() { - assertEqual("search source=t a=\"a\"", - filter( - relation("t"), - compare("=", field("a"), stringLiteral("a")) - ) - ); + assertEqual( + "search source=t a=\"a\"", + filter(relation("t"), compare("=", field("a"), stringLiteral("a")))); } @Test public void testSearchCommandWithoutSearch() { - assertEqual("source=t a=1", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - ) - ); + assertEqual("source=t a=1", filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testSearchCommandWithFilterBeforeSource() { - assertEqual("search a=1 source=t", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - )); + assertEqual( + "search a=1 source=t", filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testWhereCommand() { - assertEqual("search source=t | where a=1", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - ) - ); + assertEqual( + "search source=t | where a=1", + filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testWhereCommandWithQualifiedName() { - assertEqual("search source=t | where a.v=1", - filter( - relation("t"), - compare("=", field(qualifiedName("a", "v")), intLiteral(1)) - ) - ); + assertEqual( + "search source=t | where a.v=1", + filter(relation("t"), compare("=", field(qualifiedName("a", "v")), intLiteral(1)))); } @Test public void testFieldsCommandWithoutArguments() { - assertEqual("source=t | fields f, g", - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field("f"), field("g") - )); + assertEqual( + "source=t | fields f, g", + projectWithArg(relation("t"), defaultFieldsArgs(), field("f"), field("g"))); } @Test public void testFieldsCommandWithIncludeArguments() { - assertEqual("source=t | fields + f, g", - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field("f"), field("g") - )); + assertEqual( + "source=t | fields + f, g", + projectWithArg(relation("t"), defaultFieldsArgs(), field("f"), field("g"))); } @Test public void testFieldsCommandWithExcludeArguments() { - assertEqual("source=t | fields - f, g", + assertEqual( + "source=t | fields - f, g", projectWithArg( relation("t"), exprList(argument("exclude", booleanLiteral(true))), - field("f"), field("g") - )); + field("f"), + field("g"))); } @Test public void testSearchCommandWithQualifiedName() { - assertEqual("source=t | fields f.v, g.v", + assertEqual( + "source=t | fields f.v, g.v", projectWithArg( relation("t"), defaultFieldsArgs(), - field(qualifiedName("f", "v")), field(qualifiedName("g", "v")) - )); + field(qualifiedName("f", "v")), + field(qualifiedName("g", "v")))); } @Test public void testRenameCommand() { - assertEqual("source=t | rename f as g", - rename( - relation("t"), - map("f", "g") - )); + assertEqual("source=t | rename f as g", rename(relation("t"), map("f", "g"))); } @Test public void testRenameCommandWithMultiFields() { - 
assertEqual("source=t | rename f as g, h as i, j as k", - rename( - relation("t"), - map("f", "g"), - map("h", "i"), - map("j", "k") - )); + assertEqual( + "source=t | rename f as g, h as i, j as k", + rename(relation("t"), map("f", "g"), map("h", "i"), map("j", "k"))); } @Test public void testStatsCommand() { - assertEqual("source=t | stats count(a)", + assertEqual( + "source=t | stats count(a)", agg( relation("t"), - exprList( - alias( - "count(a)", - aggregate("count", field("a")) - ) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testStatsCommandWithByClause() { - assertEqual("source=t | stats count(a) by b DEDUP_SPLITVALUES=false", + assertEqual( + "source=t | stats count(a) by b DEDUP_SPLITVALUES=false", agg( relation("t"), - exprList( - alias( - "count(a)", - aggregate("count", field("a")) - ) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStatsCommandWithByClauseInBackticks() { - assertEqual("source=t | stats count(a) by `b` DEDUP_SPLITVALUES=false", + assertEqual( + "source=t | stats count(a) by `b` DEDUP_SPLITVALUES=false", agg( relation("t"), - exprList( - alias( - "count(a)", - aggregate("count", field("a")) - ) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStatsCommandWithAlias() { - assertEqual("source=t | stats count(a) as alias", + assertEqual( + "source=t | stats count(a) as alias", agg( relation("t"), - exprList( - alias( - "alias", - aggregate("count", field("a")) - ) - ), + exprList(alias("alias", aggregate("count", field("a")))), emptyList(), emptyList(), - defaultStatsArgs() - ) - ); + defaultStatsArgs())); } @Test public void testStatsCommandWithNestedFunctions() { - assertEqual("source=t | stats sum(a+b)", + assertEqual( + "source=t | stats sum(a+b)", agg( relation("t"), - exprList( - alias( - "sum(a+b)", - aggregate( - "sum", - function("+", field("a"), field("b")) - )) - ), + exprList(alias("sum(a+b)", aggregate("sum", function("+", field("a"), field("b"))))), emptyList(), emptyList(), - defaultStatsArgs() - )); - assertEqual("source=t | stats sum(abs(a)/2)", + defaultStatsArgs())); + assertEqual( + "source=t | stats sum(abs(a)/2)", agg( relation("t"), exprList( alias( "sum(abs(a)/2)", - aggregate( - "sum", - function( - "/", - function("abs", field("a")), - intLiteral(2) - ) - ) - ) - ), + aggregate("sum", function("/", function("abs", field("a")), intLiteral(2))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testStatsCommandWithSpan() { - assertEqual("source=t | stats avg(price) by span(timestamp, 1h)", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h)", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), emptyList(), alias("span(timestamp,1h)", span(field("timestamp"), intLiteral(1), SpanUnit.H)), - defaultStatsArgs() - )); + defaultStatsArgs())); - assertEqual("source=t | stats count(a) by span(age, 10)", + assertEqual( + "source=t | stats count(a) by span(age, 10)", agg( 
relation("t"), - exprList( - alias("count(a)", aggregate("count", field("a"))) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), emptyList(), alias("span(age,10)", span(field("age"), intLiteral(10), SpanUnit.NONE)), - defaultStatsArgs() - )); + defaultStatsArgs())); - assertEqual("source=t | stats avg(price) by span(timestamp, 1h), b", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h), b", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), exprList(alias("b", field("b"))), alias("span(timestamp,1h)", span(field("timestamp"), intLiteral(1), SpanUnit.H)), - defaultStatsArgs() - )); + defaultStatsArgs())); - assertEqual("source=t | stats avg(price) by span(timestamp, 1h), f1, f2", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h), f1, f2", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), exprList(alias("f1", field("f1")), alias("f2", field("f2"))), alias("span(timestamp,1h)", span(field("timestamp"), intLiteral(1), SpanUnit.H)), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test(expected = org.opensearch.sql.common.antlr.SyntaxCheckException.class) @@ -423,152 +333,128 @@ public void throwExceptionWithEmptyGroupByList() { @Test public void testStatsSpanWithAlias() { - assertEqual("source=t | stats avg(price) by span(timestamp, 1h) as time_span", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h) as time_span", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), emptyList(), - alias("span(timestamp,1h)", span( - field("timestamp"), intLiteral(1), SpanUnit.H), "time_span"), - defaultStatsArgs() - )); + alias( + "span(timestamp,1h)", + span(field("timestamp"), intLiteral(1), SpanUnit.H), + "time_span"), + defaultStatsArgs())); - assertEqual("source=t | stats count(a) by span(age, 10) as numeric_span", + assertEqual( + "source=t | stats count(a) by span(age, 10) as numeric_span", agg( relation("t"), - exprList( - alias("count(a)", aggregate("count", field("a"))) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), emptyList(), - alias("span(age,10)", span( - field("age"), intLiteral(10), SpanUnit.NONE), "numeric_span"), - defaultStatsArgs() - )); + alias( + "span(age,10)", span(field("age"), intLiteral(10), SpanUnit.NONE), "numeric_span"), + defaultStatsArgs())); } @Test public void testDedupCommand() { - assertEqual("source=t | dedup f1, f2", - dedupe( - relation("t"), - defaultDedupArgs(), - field("f1"), field("f2") - )); + assertEqual( + "source=t | dedup f1, f2", + dedupe(relation("t"), defaultDedupArgs(), field("f1"), field("f2"))); } - /** - * disable sortby from the dedup command syntax. - */ + /** disable sortby from the dedup command syntax. 
*/ @Ignore(value = "disable sortby from the dedup command syntax") public void testDedupCommandWithSortby() { - assertEqual("source=t | dedup f1, f2 sortby f3", + assertEqual( + "source=t | dedup f1, f2 sortby f3", agg( relation("t"), exprList(field("f1"), field("f2")), exprList(field("f3", defaultSortFieldArgs())), null, - defaultDedupArgs() - )); + defaultDedupArgs())); } @Test public void testHeadCommand() { - assertEqual("source=t | head", - head(relation("t"), 10, 0)); + assertEqual("source=t | head", head(relation("t"), 10, 0)); } @Test public void testHeadCommandWithNumber() { - assertEqual("source=t | head 3", - head(relation("t"), 3, 0)); + assertEqual("source=t | head 3", head(relation("t"), 3, 0)); } @Test public void testHeadCommandWithNumberAndOffset() { - assertEqual("source=t | head 3 from 4", - head(relation("t"), 3, 4)); + assertEqual("source=t | head 3 from 4", head(relation("t"), 3, 4)); } @Test public void testSortCommand() { - assertEqual("source=t | sort f1, f2", + assertEqual( + "source=t | sort f1, f2", sort( relation("t"), field("f1", defaultSortFieldArgs()), - field("f2", defaultSortFieldArgs()) - )); + field("f2", defaultSortFieldArgs()))); } @Test public void testSortCommandWithOptions() { - assertEqual("source=t | sort - f1, + f2", + assertEqual( + "source=t | sort - f1, + f2", sort( relation("t"), - field("f1", exprList(argument("asc", booleanLiteral(false)), - argument("type", nullLiteral()))), - field("f2", defaultSortFieldArgs()) - )); + field( + "f1", + exprList(argument("asc", booleanLiteral(false)), argument("type", nullLiteral()))), + field("f2", defaultSortFieldArgs()))); } @Test public void testEvalCommand() { - assertEqual("source=t | eval r=abs(f)", - eval( - relation("t"), - let( - field("r"), - function("abs", field("f")) - ) - )); + assertEqual( + "source=t | eval r=abs(f)", + eval(relation("t"), let(field("r"), function("abs", field("f"))))); } @Test public void testIndexName() { - assertEqual("source=`log.2020.04.20.` a=1", - filter( - relation("log.2020.04.20."), - compare("=", field("a"), intLiteral(1)) - )); - assertEqual("describe `log.2020.04.20.`", - relation(mappingTable("log.2020.04.20."))); + assertEqual( + "source=`log.2020.04.20.` a=1", + filter(relation("log.2020.04.20."), compare("=", field("a"), intLiteral(1)))); + assertEqual("describe `log.2020.04.20.`", relation(mappingTable("log.2020.04.20."))); } @Test public void testIdentifierAsIndexNameStartWithDot() { - assertEqual("source=.opensearch_dashboards", - relation(".opensearch_dashboards")); - assertEqual("describe .opensearch_dashboards", - relation(mappingTable(".opensearch_dashboards"))); + assertEqual("source=.opensearch_dashboards", relation(".opensearch_dashboards")); + assertEqual( + "describe .opensearch_dashboards", relation(mappingTable(".opensearch_dashboards"))); } @Test public void testIdentifierAsIndexNameWithDotInTheMiddle() { assertEqual("source=log.2020.10.10", relation("log.2020.10.10")); assertEqual("source=log-7.10-2020.10.10", relation("log-7.10-2020.10.10")); - assertEqual("describe log.2020.10.10", - relation(mappingTable("log.2020.10.10"))); - assertEqual("describe log-7.10-2020.10.10", - relation(mappingTable("log-7.10-2020.10.10"))); + assertEqual("describe log.2020.10.10", relation(mappingTable("log.2020.10.10"))); + assertEqual("describe log-7.10-2020.10.10", relation(mappingTable("log-7.10-2020.10.10"))); } @Test public void testIdentifierAsIndexNameWithSlashInTheMiddle() { - assertEqual("source=log-2020", - relation("log-2020")); - 
assertEqual("describe log-2020", - relation(mappingTable("log-2020"))); + assertEqual("source=log-2020", relation("log-2020")); + assertEqual("describe log-2020", relation(mappingTable("log-2020"))); } @Test public void testIdentifierAsIndexNameContainStar() { - assertEqual("source=log-2020-10-*", - relation("log-2020-10-*")); - assertEqual("describe log-2020-10-*", - relation(mappingTable("log-2020-10-*"))); + assertEqual("source=log-2020-10-*", relation("log-2020-10-*")); + assertEqual("describe log-2020-10-*", relation(mappingTable("log-2020-10-*"))); } @Test @@ -576,138 +462,132 @@ public void testIdentifierAsIndexNameContainStarAndDots() { assertEqual("source=log-2020.10.*", relation("log-2020.10.*")); assertEqual("source=log-2020.*.01", relation("log-2020.*.01")); assertEqual("source=log-2020.*.*", relation("log-2020.*.*")); - assertEqual("describe log-2020.10.*", - relation(mappingTable("log-2020.10.*"))); - assertEqual("describe log-2020.*.01", - relation(mappingTable("log-2020.*.01"))); - assertEqual("describe log-2020.*.*", - relation(mappingTable("log-2020.*.*"))); + assertEqual("describe log-2020.10.*", relation(mappingTable("log-2020.10.*"))); + assertEqual("describe log-2020.*.01", relation(mappingTable("log-2020.*.01"))); + assertEqual("describe log-2020.*.*", relation(mappingTable("log-2020.*.*"))); } @Test public void testIdentifierAsFieldNameStartWithAt() { - assertEqual("source=log-2020 | fields @timestamp", - projectWithArg( - relation("log-2020"), - defaultFieldsArgs(), - field("@timestamp") - )); + assertEqual( + "source=log-2020 | fields @timestamp", + projectWithArg(relation("log-2020"), defaultFieldsArgs(), field("@timestamp"))); } @Test public void testRareCommand() { - assertEqual("source=t | rare a", + assertEqual( + "source=t | rare a", rareTopN( relation("t"), CommandType.RARE, exprList(argument("noOfResults", intLiteral(10))), emptyList(), - field("a") - )); + field("a"))); } @Test public void testRareCommandWithGroupBy() { - assertEqual("source=t | rare a by b", + assertEqual( + "source=t | rare a by b", rareTopN( relation("t"), CommandType.RARE, exprList(argument("noOfResults", intLiteral(10))), exprList(field("b")), - field("a") - )); + field("a"))); } @Test public void testRareCommandWithMultipleFields() { - assertEqual("source=t | rare `a`, `b` by `c`", + assertEqual( + "source=t | rare `a`, `b` by `c`", rareTopN( relation("t"), CommandType.RARE, exprList(argument("noOfResults", intLiteral(10))), exprList(field("c")), field("a"), - field("b") - )); + field("b"))); } @Test public void testTopCommandWithN() { - assertEqual("source=t | top 1 a", + assertEqual( + "source=t | top 1 a", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(1))), emptyList(), - field("a") - )); + field("a"))); } @Test public void testTopCommandWithoutNAndGroupBy() { - assertEqual("source=t | top a", + assertEqual( + "source=t | top a", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(10))), emptyList(), - field("a") - )); + field("a"))); } @Test public void testTopCommandWithNAndGroupBy() { - assertEqual("source=t | top 1 a by b", + assertEqual( + "source=t | top 1 a by b", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(1))), exprList(field("b")), - field("a") - )); + field("a"))); } @Test public void testTopCommandWithMultipleFields() { - assertEqual("source=t | top 1 `a`, `b` by `c`", + assertEqual( + "source=t | top 1 `a`, `b` by `c`", rareTopN( relation("t"), 
CommandType.TOP, exprList(argument("noOfResults", intLiteral(1))), exprList(field("c")), field("a"), - field("b") - )); + field("b"))); } @Test public void testGrokCommand() { - assertEqual("source=t | grok raw \"pattern\"", + assertEqual( + "source=t | grok raw \"pattern\"", parse( relation("t"), ParseMethod.GROK, field("raw"), stringLiteral("pattern"), - ImmutableMap.of() - )); + ImmutableMap.of())); } @Test public void testParseCommand() { - assertEqual("source=t | parse raw \"pattern\"", + assertEqual( + "source=t | parse raw \"pattern\"", parse( relation("t"), ParseMethod.REGEX, field("raw"), stringLiteral("pattern"), - ImmutableMap.of() - )); + ImmutableMap.of())); } @Test public void testPatternsCommand() { - assertEqual("source=t | patterns new_field=\"custom_field\" " - + "pattern=\"custom_pattern\" raw", + assertEqual( + "source=t | patterns new_field=\"custom_field\" " + "pattern=\"custom_pattern\" raw", parse( relation("t"), ParseMethod.PATTERNS, @@ -716,8 +596,7 @@ public void testPatternsCommand() { ImmutableMap.builder() .put("new_field", stringLiteral("custom_field")) .put("pattern", stringLiteral("custom_pattern")) - .build() - )); + .build())); } @Test @@ -734,114 +613,118 @@ public void testPatternsCommandWithoutArguments() { @Test public void testKmeansCommand() { - assertEqual("source=t | kmeans centroids=3 iterations=2 distance_type='l1'", - new Kmeans(relation("t"), ImmutableMap.builder() - .put("centroids", new Literal(3, DataType.INTEGER)) - .put("iterations", new Literal(2, DataType.INTEGER)) - .put("distance_type", new Literal("l1", DataType.STRING)) - .build() - )); + assertEqual( + "source=t | kmeans centroids=3 iterations=2 distance_type='l1'", + new Kmeans( + relation("t"), + ImmutableMap.builder() + .put("centroids", new Literal(3, DataType.INTEGER)) + .put("iterations", new Literal(2, DataType.INTEGER)) + .put("distance_type", new Literal("l1", DataType.STRING)) + .build())); } @Test public void testKmeansCommandWithoutParameter() { - assertEqual("source=t | kmeans", - new Kmeans(relation("t"), ImmutableMap.of())); + assertEqual("source=t | kmeans", new Kmeans(relation("t"), ImmutableMap.of())); } @Test public void testMLCommand() { - assertEqual("source=t | ml action='trainandpredict' " - + "algorithm='kmeans' centroid=3 iteration=2 dist_type='l1'", - new ML(relation("t"), ImmutableMap.builder() - .put("action", new Literal("trainandpredict", DataType.STRING)) - .put("algorithm", new Literal("kmeans", DataType.STRING)) - .put("centroid", new Literal(3, DataType.INTEGER)) - .put("iteration", new Literal(2, DataType.INTEGER)) - .put("dist_type", new Literal("l1", DataType.STRING)) - .build() - )); + assertEqual( + "source=t | ml action='trainandpredict' " + + "algorithm='kmeans' centroid=3 iteration=2 dist_type='l1'", + new ML( + relation("t"), + ImmutableMap.builder() + .put("action", new Literal("trainandpredict", DataType.STRING)) + .put("algorithm", new Literal("kmeans", DataType.STRING)) + .put("centroid", new Literal(3, DataType.INTEGER)) + .put("iteration", new Literal(2, DataType.INTEGER)) + .put("dist_type", new Literal("l1", DataType.STRING)) + .build())); } @Test public void testDescribeCommand() { - assertEqual("describe t", - relation(mappingTable("t"))); + assertEqual("describe t", relation(mappingTable("t"))); } @Test public void testDescribeMatchAllCrossClusterSearchCommand() { - assertEqual("describe *:t", - relation(mappingTable("*:t"))); + assertEqual("describe *:t", relation(mappingTable("*:t"))); } @Test public void 
testDescribeCommandWithMultipleIndices() { - assertEqual("describe t,u", - relation(mappingTable("t,u"))); + assertEqual("describe t,u", relation(mappingTable("t,u"))); } @Test public void testDescribeCommandWithFullyQualifiedTableName() { - assertEqual("describe prometheus.http_metric", + assertEqual( + "describe prometheus.http_metric", relation(qualifiedName("prometheus", mappingTable("http_metric")))); - assertEqual("describe prometheus.schema.http_metric", + assertEqual( + "describe prometheus.schema.http_metric", relation(qualifiedName("prometheus", "schema", mappingTable("http_metric")))); } @Test public void test_fitRCFADCommand_withoutDataFormat() { - assertEqual("source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + assertEqual( + "source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + "anomaly_rate=0.1 anomaly_score_threshold=0.1 sample_size=256 " + "number_of_trees=256 time_zone='PST' output_after=256 " + "training_data_size=256", - new AD(relation("t"), ImmutableMap.builder() - .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) - .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) - .put("sample_size", new Literal(256, DataType.INTEGER)) - .put("number_of_trees", new Literal(256, DataType.INTEGER)) - .put("time_zone", new Literal("PST", DataType.STRING)) - .put("output_after", new Literal(256, DataType.INTEGER)) - .put("shingle_size", new Literal(10, DataType.INTEGER)) - .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) - .put("time_field", new Literal("timestamp", DataType.STRING)) - .put("training_data_size", new Literal(256, DataType.INTEGER)) - .build() - )); + new AD( + relation("t"), + ImmutableMap.builder() + .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) + .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) + .put("sample_size", new Literal(256, DataType.INTEGER)) + .put("number_of_trees", new Literal(256, DataType.INTEGER)) + .put("time_zone", new Literal("PST", DataType.STRING)) + .put("output_after", new Literal(256, DataType.INTEGER)) + .put("shingle_size", new Literal(10, DataType.INTEGER)) + .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) + .put("time_field", new Literal("timestamp", DataType.STRING)) + .put("training_data_size", new Literal(256, DataType.INTEGER)) + .build())); } @Test public void test_fitRCFADCommand_withDataFormat() { - assertEqual("source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + assertEqual( + "source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + "anomaly_rate=0.1 anomaly_score_threshold=0.1 sample_size=256 " + "number_of_trees=256 time_zone='PST' output_after=256 " + "training_data_size=256 date_format='HH:mm:ss yyyy-MM-dd'", - new AD(relation("t"), ImmutableMap.builder() - .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) - .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) - .put("sample_size", new Literal(256, DataType.INTEGER)) - .put("number_of_trees", new Literal(256, DataType.INTEGER)) - .put("date_format", new Literal("HH:mm:ss yyyy-MM-dd", DataType.STRING)) - .put("time_zone", new Literal("PST", DataType.STRING)) - .put("output_after", new Literal(256, DataType.INTEGER)) - .put("shingle_size", new Literal(10, DataType.INTEGER)) - .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) - .put("time_field", new Literal("timestamp", DataType.STRING)) - .put("training_data_size", new Literal(256, DataType.INTEGER)) - .build() - )); + new AD( + relation("t"), + 
ImmutableMap.builder() + .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) + .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) + .put("sample_size", new Literal(256, DataType.INTEGER)) + .put("number_of_trees", new Literal(256, DataType.INTEGER)) + .put("date_format", new Literal("HH:mm:ss yyyy-MM-dd", DataType.STRING)) + .put("time_zone", new Literal("PST", DataType.STRING)) + .put("output_after", new Literal(256, DataType.INTEGER)) + .put("shingle_size", new Literal(10, DataType.INTEGER)) + .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) + .put("time_field", new Literal("timestamp", DataType.STRING)) + .put("training_data_size", new Literal(256, DataType.INTEGER)) + .build())); } @Test public void test_batchRCFADCommand() { - assertEqual("source=t | AD", - new AD(relation("t"), ImmutableMap.of())); + assertEqual("source=t | AD", new AD(relation("t"), ImmutableMap.of())); } @Test public void testShowDataSourcesCommand() { - assertEqual("show datasources", - relation(DATASOURCES_TABLE_NAME)); + assertEqual("show datasources", relation(DATASOURCES_TABLE_NAME)); } protected void assertEqual(String query, Node expectedPlan) { diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java index 8472e61361..aa25a6fcc6 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static java.util.Collections.emptyList; @@ -58,464 +57,313 @@ public class AstExpressionBuilderTest extends AstBuilderTest { @Test public void testLogicalNotExpr() { - assertEqual("source=t not a=1", - filter( - relation("t"), - not( - compare("=", field("a"), intLiteral(1)) - ) - )); + assertEqual( + "source=t not a=1", filter(relation("t"), not(compare("=", field("a"), intLiteral(1))))); } @Test public void testLogicalOrExpr() { - assertEqual("source=t a=1 or b=2", + assertEqual( + "source=t a=1 or b=2", filter( relation("t"), - or( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + or(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalAndExpr() { - assertEqual("source=t a=1 and b=2", + assertEqual( + "source=t a=1 and b=2", filter( relation("t"), - and( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + and(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalAndExprWithoutKeywordAnd() { - assertEqual("source=t a=1 b=2", + assertEqual( + "source=t a=1 b=2", filter( relation("t"), - and( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + and(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalXorExpr() { - assertEqual("source=t a=1 xor b=2", + assertEqual( + "source=t a=1 xor b=2", filter( relation("t"), - xor( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + xor(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalLikeExpr() { - assertEqual("source=t like(a, '_a%b%c_d_')", - filter( - relation("t"), - function("like", field("a"), 
stringLiteral("_a%b%c_d_")) - )); + assertEqual( + "source=t like(a, '_a%b%c_d_')", + filter(relation("t"), function("like", field("a"), stringLiteral("_a%b%c_d_")))); } @Test public void testBooleanIsNullFunction() { - assertEqual("source=t isnull(a)", - filter( - relation("t"), - function("is null", field("a")) - )); + assertEqual("source=t isnull(a)", filter(relation("t"), function("is null", field("a")))); } @Test public void testBooleanIsNotNullFunction() { - assertEqual("source=t isnotnull(a)", - filter( - relation("t"), - function("is not null", field("a")) - )); + assertEqual( + "source=t isnotnull(a)", filter(relation("t"), function("is not null", field("a")))); } - /** - * Todo. search operator should not include functionCall, need to change antlr. - */ + /** Todo. search operator should not include functionCall, need to change antlr. */ @Ignore("search operator should not include functionCall, need to change antlr") public void testEvalExpr() { - assertEqual("source=t f=abs(a)", - filter( - relation("t"), - equalTo( - field("f"), - function("abs", field("a")) - ) - )); + assertEqual( + "source=t f=abs(a)", + filter(relation("t"), equalTo(field("f"), function("abs", field("a"))))); } @Test public void testEvalFunctionExpr() { - assertEqual("source=t | eval f=abs(a)", - eval( - relation("t"), - let( - field("f"), - function("abs", field("a")) - ) - )); + assertEqual( + "source=t | eval f=abs(a)", + eval(relation("t"), let(field("f"), function("abs", field("a"))))); } @Test public void testEvalFunctionExprNoArgs() { - assertEqual("source=t | eval f=PI()", - eval( - relation("t"), - let( - field("f"), - function("PI") - ) - )); + assertEqual("source=t | eval f=PI()", eval(relation("t"), let(field("f"), function("PI")))); } @Test public void testPositionFunctionExpr() { - assertEqual("source=t | eval f=position('substr' IN 'str')", + assertEqual( + "source=t | eval f=position('substr' IN 'str')", eval( relation("t"), - let( - field("f"), - function("position", - stringLiteral("substr"), stringLiteral("str")) - ) - )); + let(field("f"), function("position", stringLiteral("substr"), stringLiteral("str"))))); } @Test public void testEvalBinaryOperationExpr() { - assertEqual("source=t | eval f=a+b", - eval( - relation("t"), - let( - field("f"), - function("+", field("a"), field("b")) - ) - )); - assertEqual("source=t | eval f=(a+b)", - eval( - relation("t"), - let( - field("f"), - function("+", field("a"), field("b")) - ) - )); + assertEqual( + "source=t | eval f=a+b", + eval(relation("t"), let(field("f"), function("+", field("a"), field("b"))))); + assertEqual( + "source=t | eval f=(a+b)", + eval(relation("t"), let(field("f"), function("+", field("a"), field("b"))))); } @Test public void testLiteralValueBinaryOperationExpr() { - assertEqual("source=t | eval f=3+2", - eval( - relation("t"), - let( - field("f"), - function("+", intLiteral(3), intLiteral(2)) - ) - )); + assertEqual( + "source=t | eval f=3+2", + eval(relation("t"), let(field("f"), function("+", intLiteral(3), intLiteral(2))))); } @Test public void testBinaryOperationExprWithParentheses() { - assertEqual("source = t | where a = (1 + 2) * 3", + assertEqual( + "source = t | where a = (1 + 2) * 3", filter( relation("t"), - compare("=", + compare( + "=", field("a"), - function("*", - function("+", intLiteral(1), intLiteral(2)), - intLiteral(3))))); + function("*", function("+", intLiteral(1), intLiteral(2)), intLiteral(3))))); } @Test public void testBinaryOperationExprPrecedence() { - assertEqual("source = t | where a = 1 + 2 * 
3", + assertEqual( + "source = t | where a = 1 + 2 * 3", filter( relation("t"), - compare("=", + compare( + "=", field("a"), - function("+", - intLiteral(1), - function("*", intLiteral(2), intLiteral(3)))))); + function("+", intLiteral(1), function("*", intLiteral(2), intLiteral(3)))))); } @Test public void testCompareExpr() { - assertEqual("source=t a='b'", - filter( - relation("t"), - compare("=", field("a"), stringLiteral("b")) - )); + assertEqual( + "source=t a='b'", filter(relation("t"), compare("=", field("a"), stringLiteral("b")))); } @Test public void testCompareFieldsExpr() { - assertEqual("source=t a>b", - filter( - relation("t"), - compare(">", field("a"), field("b")) - )); + assertEqual("source=t a>b", filter(relation("t"), compare(">", field("a"), field("b")))); } @Test public void testInExpr() { - assertEqual("source=t f in (1, 2, 3)", - filter( - relation("t"), - in( - field("f"), - intLiteral(1), intLiteral(2), intLiteral(3)) - )); + assertEqual( + "source=t f in (1, 2, 3)", + filter(relation("t"), in(field("f"), intLiteral(1), intLiteral(2), intLiteral(3)))); } @Test public void testFieldExpr() { - assertEqual("source=t | sort + f", - sort( - relation("t"), - field("f", defaultSortFieldArgs()) - )); + assertEqual("source=t | sort + f", sort(relation("t"), field("f", defaultSortFieldArgs()))); } @Test public void testSortFieldWithMinusKeyword() { - assertEqual("source=t | sort - f", + assertEqual( + "source=t | sort - f", sort( relation("t"), - field( - "f", - argument("asc", booleanLiteral(false)), - argument("type", nullLiteral()) - ) - )); + field("f", argument("asc", booleanLiteral(false)), argument("type", nullLiteral())))); } @Test public void testSortFieldWithBackticks() { - assertEqual("source=t | sort `f`", - sort( - relation("t"), - field("f", defaultSortFieldArgs()) - )); + assertEqual("source=t | sort `f`", sort(relation("t"), field("f", defaultSortFieldArgs()))); } @Test public void testSortFieldWithAutoKeyword() { - assertEqual("source=t | sort auto(f)", + assertEqual( + "source=t | sort auto(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("auto")) - ) - )); + argument("type", stringLiteral("auto"))))); } @Test public void testSortFieldWithIpKeyword() { - assertEqual("source=t | sort ip(f)", + assertEqual( + "source=t | sort ip(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("ip")) - ) - )); + argument("type", stringLiteral("ip"))))); } @Test public void testSortFieldWithNumKeyword() { - assertEqual("source=t | sort num(f)", + assertEqual( + "source=t | sort num(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("num")) - ) - )); + argument("type", stringLiteral("num"))))); } @Test public void testSortFieldWithStrKeyword() { - assertEqual("source=t | sort str(f)", + assertEqual( + "source=t | sort str(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("str")) - ) - )); + argument("type", stringLiteral("str"))))); } @Test public void testAggFuncCallExpr() { - assertEqual("source=t | stats avg(a) by b", + assertEqual( + "source=t | stats avg(a) by b", agg( relation("t"), - exprList( - alias( - "avg(a)", - aggregate("avg", field("a")) - ) - ), + exprList(alias("avg(a)", aggregate("avg", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + 
defaultStatsArgs())); } @Test public void testVarAggregationShouldPass() { - assertEqual("source=t | stats var_samp(a) by b", + assertEqual( + "source=t | stats var_samp(a) by b", agg( relation("t"), - exprList( - alias( - "var_samp(a)", - aggregate("var_samp", field("a")) - ) - ), + exprList(alias("var_samp(a)", aggregate("var_samp", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testVarpAggregationShouldPass() { - assertEqual("source=t | stats var_pop(a) by b", + assertEqual( + "source=t | stats var_pop(a) by b", agg( relation("t"), - exprList( - alias( - "var_pop(a)", - aggregate("var_pop", field("a")) - ) - ), + exprList(alias("var_pop(a)", aggregate("var_pop", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStdDevAggregationShouldPass() { - assertEqual("source=t | stats stddev_samp(a) by b", + assertEqual( + "source=t | stats stddev_samp(a) by b", agg( relation("t"), - exprList( - alias( - "stddev_samp(a)", - aggregate("stddev_samp", field("a")) - ) - ), + exprList(alias("stddev_samp(a)", aggregate("stddev_samp", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStdDevPAggregationShouldPass() { - assertEqual("source=t | stats stddev_pop(a) by b", + assertEqual( + "source=t | stats stddev_pop(a) by b", agg( relation("t"), - exprList( - alias( - "stddev_pop(a)", - aggregate("stddev_pop", field("a")) - ) - ), + exprList(alias("stddev_pop(a)", aggregate("stddev_pop", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testPercentileAggFuncExpr() { - assertEqual("source=t | stats percentile<1>(a)", + assertEqual( + "source=t | stats percentile<1>(a)", agg( relation("t"), exprList( - alias("percentile<1>(a)", - aggregate( - "percentile", - field("a"), - argument("rank", intLiteral(1)) - ) - ) - ), + alias( + "percentile<1>(a)", + aggregate("percentile", field("a"), argument("rank", intLiteral(1))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testCountFuncCallExpr() { - assertEqual("source=t | stats count() by b", + assertEqual( + "source=t | stats count() by b", agg( relation("t"), - exprList( - alias( - "count()", - aggregate("count", AllFields.of()) - ) - ), + exprList(alias("count()", aggregate("count", AllFields.of()))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testDistinctCount() { - assertEqual("source=t | stats distinct_count(a)", + assertEqual( + "source=t | stats distinct_count(a)", agg( relation("t"), - exprList( - alias("distinct_count(a)", - distinctAggregate("count", field("a")))), + exprList(alias("distinct_count(a)", distinctAggregate("count", field("a")))), emptyList(), emptyList(), defaultStatsArgs())); @@ -523,168 +371,114 @@ public void testDistinctCount() { @Test public void testTakeAggregationNoArgsShouldPass() { - assertEqual("source=t | stats take(a)", + assertEqual( + "source=t | stats take(a)", agg( relation("t"), - exprList(alias("take(a)", - 
aggregate("take", field("a"), unresolvedArg("size", intLiteral(10))))), + exprList( + alias( + "take(a)", + aggregate("take", field("a"), unresolvedArg("size", intLiteral(10))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testTakeAggregationWithArgsShouldPass() { - assertEqual("source=t | stats take(a, 5)", + assertEqual( + "source=t | stats take(a, 5)", agg( relation("t"), - exprList(alias("take(a, 5)", - aggregate("take", field("a"), unresolvedArg("size", intLiteral(5))))), + exprList( + alias( + "take(a, 5)", + aggregate("take", field("a"), unresolvedArg("size", intLiteral(5))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } - @Test public void testEvalFuncCallExpr() { - assertEqual("source=t | eval f=abs(a)", - eval( - relation("t"), - let( - field("f"), - function("abs", field("a")) - ) - )); + assertEqual( + "source=t | eval f=abs(a)", + eval(relation("t"), let(field("f"), function("abs", field("a"))))); } @Test public void testDataTypeFuncCall() { - assertEqual("source=t | eval f=cast(1 as string)", - eval( - relation("t"), - let( - field("f"), - cast(intLiteral(1), stringLiteral("string")) - ) - )); + assertEqual( + "source=t | eval f=cast(1 as string)", + eval(relation("t"), let(field("f"), cast(intLiteral(1), stringLiteral("string"))))); } @Test public void testNestedFieldName() { - assertEqual("source=t | fields field0.field1.field2", + assertEqual( + "source=t | fields field0.field1.field2", projectWithArg( relation("t"), defaultFieldsArgs(), - field( - qualifiedName("field0", "field1", "field2") - ) - )); + field(qualifiedName("field0", "field1", "field2")))); } @Test public void testFieldNameWithSpecialChars() { - assertEqual("source=t | fields `field-0`", - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field( - qualifiedName("field-0") - ) - )); + assertEqual( + "source=t | fields `field-0`", + projectWithArg(relation("t"), defaultFieldsArgs(), field(qualifiedName("field-0")))); } @Test public void testNestedFieldNameWithSpecialChars() { - assertEqual("source=t | fields `field-0`.`field#1`.`field*2`", + assertEqual( + "source=t | fields `field-0`.`field#1`.`field*2`", projectWithArg( relation("t"), defaultFieldsArgs(), - field( - qualifiedName("field-0", "field#1", "field*2") - ) - )); + field(qualifiedName("field-0", "field#1", "field*2")))); } @Test public void testStringLiteralExpr() { - assertEqual("source=t a=\"string\"", - filter( - relation("t"), - compare( - "=", - field("a"), - stringLiteral("string") - ) - )); + assertEqual( + "source=t a=\"string\"", + filter(relation("t"), compare("=", field("a"), stringLiteral("string")))); } @Test public void testIntegerLiteralExpr() { - assertEqual("source=t a=1 b=-1", + assertEqual( + "source=t a=1 b=-1", filter( relation("t"), and( - compare( - "=", - field("a"), - intLiteral(1) - ), - compare( - "=", - field("b"), - intLiteral(-1) - ) - ) - )); + compare("=", field("a"), intLiteral(1)), + compare("=", field("b"), intLiteral(-1))))); } @Test public void testLongLiteralExpr() { - assertEqual("source=t a=1234567890123 b=-1234567890123", + assertEqual( + "source=t a=1234567890123 b=-1234567890123", filter( relation("t"), and( - compare( - "=", - field("a"), - longLiteral(1234567890123L) - ), - compare( - "=", - field("b"), - longLiteral(-1234567890123L) - ) - ) - )); + compare("=", field("a"), longLiteral(1234567890123L)), + compare("=", field("b"), longLiteral(-1234567890123L))))); } @Test public void testDoubleLiteralExpr() { 
- assertEqual("source=t b=0.1", - filter( - relation("t"), - compare( - "=", - field("b"), - doubleLiteral(0.1) - ) - )); + assertEqual( + "source=t b=0.1", filter(relation("t"), compare("=", field("b"), doubleLiteral(0.1)))); } @Test public void testBooleanLiteralExpr() { - assertEqual("source=t a=true", - filter( - relation("t"), - compare( - "=", - field("a"), - booleanLiteral(true) - ) - )); + assertEqual( + "source=t a=true", filter(relation("t"), compare("=", field("a"), booleanLiteral(true)))); } @Test @@ -692,42 +486,23 @@ public void testIntervalLiteralExpr() { assertEqual( "source=t a = interval 1 day", filter( - relation("t"), - compare( - "=", - field("a"), - intervalLiteral(1, DataType.INTEGER, "day") - ) - )); + relation("t"), compare("=", field("a"), intervalLiteral(1, DataType.INTEGER, "day")))); } @Test public void testKeywordsAsIdentifiers() { - assertEqual( - "source=timestamp", - relation("timestamp") - ); + assertEqual("source=timestamp", relation("timestamp")); assertEqual( "source=t | fields timestamp", - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field("timestamp") - ) - ); + projectWithArg(relation("t"), defaultFieldsArgs(), field("timestamp"))); } @Test public void canBuildKeywordsAsIdentInQualifiedName() { assertEqual( "source=test | fields timestamp", - projectWithArg( - relation("test"), - defaultFieldsArgs(), - field("timestamp") - ) - ); + projectWithArg(relation("test"), defaultFieldsArgs(), field("timestamp"))); } @Test @@ -740,10 +515,7 @@ public void canBuildMatchRelevanceFunctionWithArguments() { "match", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -755,13 +527,11 @@ public void canBuildMulti_matchRelevanceFunctionWithArguments() { relation("test"), function( "multi_match", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -773,13 +543,11 @@ public void canBuildSimple_query_stringRelevanceFunctionWithArguments() { relation("test"), function( "simple_query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -791,13 +559,11 @@ public void canBuildQuery_stringRelevanceFunctionWithArguments() { relation("test"), function( "query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -816,11 +582,10 @@ public void functionNameCanBeUsedAsIdentifier() { + "| TIME_TO_SEC | TIMESTAMP | TO_DAYS | UNIX_TIMESTAMP | WEEK | YEAR"); 
assertFunctionNameCouldBeId( "SUBSTR | SUBSTRING | TRIM | LTRIM | RTRIM | LOWER | UPPER | CONCAT | CONCAT_WS | LENGTH " - + "| STRCMP | RIGHT | LEFT | ASCII | LOCATE | REPLACE" - ); + + "| STRCMP | RIGHT | LEFT | ASCII | LOCATE | REPLACE"); assertFunctionNameCouldBeId( "ABS | CEIL | CEILING | CONV | CRC32 | E | EXP | FLOOR | LN | LOG" - + " | LOG10 | LOG2 | MOD | PI |POW | POWER | RAND | ROUND | SIGN | SQRT | TRUNCATE " + + " | LOG10 | LOG2 | MOD | PI |POW | POWER | RAND | ROUND | SIGN | SQRT | TRUNCATE " + "| ACOS | ASIN | ATAN | ATAN2 | COS | COT | DEGREES | RADIANS | SIN | TAN"); assertFunctionNameCouldBeId( "SEARCH | DESCRIBE | SHOW | FROM | WHERE | FIELDS | RENAME | STATS " @@ -831,100 +596,79 @@ public void functionNameCanBeUsedAsIdentifier() { void assertFunctionNameCouldBeId(String antlrFunctionName) { List functionList = - Arrays.stream(antlrFunctionName.split("\\|")).map(String::stripLeading) - .map(String::stripTrailing).collect( - Collectors.toList()); + Arrays.stream(antlrFunctionName.split("\\|")) + .map(String::stripLeading) + .map(String::stripTrailing) + .collect(Collectors.toList()); assertFalse(functionList.isEmpty()); for (String functionName : functionList) { - assertEqual(String.format(Locale.ROOT, "source=t | fields %s", functionName), - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field( - qualifiedName(functionName) - ) - )); + assertEqual( + String.format(Locale.ROOT, "source=t | fields %s", functionName), + projectWithArg(relation("t"), defaultFieldsArgs(), field(qualifiedName(functionName)))); } } // https://github.com/opensearch-project/sql/issues/1318 @Test public void indexCanBeId() { - assertEqual("source = index | stats count() by index", + assertEqual( + "source = index | stats count() by index", agg( relation("index"), - exprList( - alias( - "count()", - aggregate("count", AllFields.of()) - ) - ), + exprList(alias("count()", aggregate("count", AllFields.of()))), emptyList(), - exprList( - alias( - "index", - field("index") - )), - defaultStatsArgs() - )); + exprList(alias("index", field("index"))), + defaultStatsArgs())); } @Test public void testExtractFunctionExpr() { - assertEqual("source=t | eval f=extract(day from '2001-05-07 10:11:12')", + assertEqual( + "source=t | eval f=extract(day from '2001-05-07 10:11:12')", eval( relation("t"), let( field("f"), - function("extract", - stringLiteral("day"), stringLiteral("2001-05-07 10:11:12")) - ) - )); + function("extract", stringLiteral("day"), stringLiteral("2001-05-07 10:11:12"))))); } - @Test public void testGet_FormatFunctionExpr() { - assertEqual("source=t | eval f=get_format(DATE,'USA')", + assertEqual( + "source=t | eval f=get_format(DATE,'USA')", eval( relation("t"), - let( - field("f"), - function("get_format", - stringLiteral("DATE"), stringLiteral("USA")) - ) - )); + let(field("f"), function("get_format", stringLiteral("DATE"), stringLiteral("USA"))))); } @Test public void testTimeStampAddFunctionExpr() { - assertEqual("source=t | eval f=timestampadd(YEAR, 15, '2001-03-06 00:00:00')", + assertEqual( + "source=t | eval f=timestampadd(YEAR, 15, '2001-03-06 00:00:00')", eval( relation("t"), let( field("f"), - function("timestampadd", + function( + "timestampadd", stringLiteral("YEAR"), intLiteral(15), - stringLiteral("2001-03-06 00:00:00")) - ) - )); + stringLiteral("2001-03-06 00:00:00"))))); } @Test public void testTimeStampDiffFunctionExpr() { - assertEqual("source=t | eval f=timestampdiff(" - + "YEAR, '1997-01-01 00:00:00', '2001-03-06 00:00:00')", + assertEqual( + "source=t | eval 
f=timestampdiff(YEAR, '1997-01-01 00:00:00', '2001-03-06 00:00:00')", eval( relation("t"), let( field("f"), - function("timestampdiff", + function( + "timestampdiff", stringLiteral("YEAR"), stringLiteral("1997-01-01 00:00:00"), - stringLiteral("2001-03-06 00:00:00")) - ) - )); + stringLiteral("2001-03-06 00:00:00"))))); } } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java index ddcde513dd..16aa0752e6 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static org.junit.Assert.assertEquals; @@ -31,6 +30,7 @@ public class AstNowLikeFunctionTest { /** * Set parameterized values used in test. + * * @param name Function name * @param hasFsp Whether function has fsp argument * @param hasShortcut Whether function has shortcut (call without `()`) @@ -43,24 +43,26 @@ public AstNowLikeFunctionTest(String name, Boolean hasFsp, Boolean hasShortcut) /** * Returns function data to test. + * * @return An iterable. */ @Parameterized.Parameters(name = "{0}") public static Iterable functionNames() { - return List.of(new Object[][]{ - {"now", false, false }, - {"current_timestamp", false, false}, - {"localtimestamp", false, false}, - {"localtime", false, false}, - {"sysdate", true, false}, - {"curtime", false, false}, - {"current_time", false, false}, - {"curdate", false, false}, - {"current_date", false, false}, - {"utc_date", false, false}, - {"utc_time", false, false}, - {"utc_timestamp", false, false} - }); + return List.of( + new Object[][] { + {"now", false, false}, + {"current_timestamp", false, false}, + {"localtimestamp", false, false}, + {"localtime", false, false}, + {"sysdate", true, false}, + {"curtime", false, false}, + {"current_time", false, false}, + {"curdate", false, false}, + {"current_date", false, false}, + {"utc_date", false, false}, + {"utc_time", false, false}, + {"utc_timestamp", false, false} + }); } private final String name; @@ -70,26 +72,20 @@ public static Iterable functionNames() { @Test public void test_function_call_eval() { assertEqual( - eval(relation("t"), let(field("r"), function(name))), - "source=t | eval r=" + name + "()" - ); + eval(relation("t"), let(field("r"), function(name))), "source=t | eval r=" + name + "()"); } @Test public void test_shortcut_eval() { Assume.assumeTrue(hasShortcut); - assertEqual( - eval(relation("t"), let(field("r"), function(name))), - "source=t | eval r=" + name - ); + assertEqual(eval(relation("t"), let(field("r"), function(name))), "source=t | eval r=" + name); } @Test public void test_function_call_where() { assertEqual( filter(relation("t"), compare("=", field("a"), function(name))), - "search source=t | where a=" + name + "()" - ); + "search source=t | where a=" + name + "()"); } @Test @@ -97,18 +93,15 @@ public void test_shortcut_where() { Assume.assumeTrue(hasShortcut); assertEqual( filter(relation("t"), compare("=", field("a"), function(name))), - "search source=t | where a=" + name - ); + "search source=t | where a=" + name); } @Test public void test_function_call_fsp() { Assume.assumeTrue(hasFsp); - assertEqual(filter( - relation("t"), - compare("=", field("a"), function(name, intLiteral(0))) - ), "search source=t | where a=" + name + "(0)" - ); + assertEqual( + filter(relation("t"), 
compare("=", field("a"), function(name, intLiteral(0)))), + "search source=t | where a=" + name + "(0)"); } protected void assertEqual(Node expectedPlan, String query) { diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java index de74e4932f..7d7b31e822 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java @@ -28,8 +28,7 @@ public class AstStatementBuilderTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); private PPLSyntaxParser parser = new PPLSyntaxParser(); @@ -38,9 +37,8 @@ public void buildQueryStatement() { assertEqual( "search source=t a=1", new Query( - project( - filter(relation("t"), compare("=", field("a"), - intLiteral(1))), AllFields.of()), 0)); + project(filter(relation("t"), compare("=", field("a"), intLiteral(1))), AllFields.of()), + 0)); } @Test @@ -50,8 +48,8 @@ public void buildExplainStatement() { new Explain( new Query( project( - filter(relation("t"), compare("=", field("a"), intLiteral(1))), - AllFields.of()), 0))); + filter(relation("t"), compare("=", field("a"), intLiteral(1))), AllFields.of()), + 0))); } private void assertEqual(String query, Statement expectedStatement) { @@ -66,7 +64,8 @@ private void assertExplainEqual(String query, Statement expectedStatement) { private Node plan(String query, boolean isExplain) { final AstStatementBuilder builder = - new AstStatementBuilder(new AstBuilder(new AstExpressionBuilder(), query), + new AstStatementBuilder( + new AstBuilder(new AstExpressionBuilder(), query), AstStatementBuilder.StatementBuilderContext.builder().isExplain(isExplain).build()); return builder.visit(parser.parse(query)); } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java index e18dfbd65c..761dbe2997 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static java.util.Collections.emptyList; @@ -28,12 +27,10 @@ public class ArgumentFactoryTest extends AstBuilderTest { @Test public void testFieldsCommandArgument() { - assertEqual("source=t | fields - a", + assertEqual( + "source=t | fields - a", projectWithArg( - relation("t"), - exprList(argument("exclude", booleanLiteral(true))), - field("a") - )); + relation("t"), exprList(argument("exclude", booleanLiteral(true))), field("a"))); } @Test @@ -47,20 +44,14 @@ public void testStatsCommandArgument() { "source=t | stats partitions=1 allnum=false delim=',' avg(a) dedup_splitvalues=true", agg( relation("t"), - exprList( - alias( - "avg(a)", - aggregate("avg", field("a"))) - ), + exprList(alias("avg(a)", aggregate("avg", field("a")))), emptyList(), emptyList(), exprList( argument("partitions", intLiteral(1)), argument("allnum", booleanLiteral(false)), argument("delim", stringLiteral(",")), - argument("dedupsplit", booleanLiteral(true)) - ) - )); + argument("dedupsplit", booleanLiteral(true))))); } @Test @@ -72,52 +63,43 @@ public void testStatsCommandDefaultArgument() { @Test public void testDedupCommandArgument() { - assertEqual("source=t | dedup 3 field0 
keepempty=false consecutive=true", + assertEqual( + "source=t | dedup 3 field0 keepempty=false consecutive=true", dedupe( relation("t"), exprList( argument("number", intLiteral(3)), argument("keepempty", booleanLiteral(false)), - argument("consecutive", booleanLiteral(true)) - ), - field("field0") - )); + argument("consecutive", booleanLiteral(true))), + field("field0"))); } @Test public void testDedupCommandDefaultArgument() { assertEqual( - "source=t | dedup 1 field0 keepempty=false consecutive=false", - "source=t | dedup field0" - ); + "source=t | dedup 1 field0 keepempty=false consecutive=false", "source=t | dedup field0"); } @Test public void testSortCommandDefaultArgument() { - assertEqual( - "source=t | sort field0", - "source=t | sort field0" - ); + assertEqual("source=t | sort field0", "source=t | sort field0"); } @Test public void testSortFieldArgument() { - assertEqual("source=t | sort - auto(field0)", + assertEqual( + "source=t | sort - auto(field0)", sort( relation("t"), field( "field0", exprList( argument("asc", booleanLiteral(false)), - argument("type", stringLiteral("auto")) - ) - ) - )); + argument("type", stringLiteral("auto")))))); } @Test public void testNoArgConstructorForArgumentFactoryShouldPass() { new ArgumentFactory(); } - } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java index 1998647dba..cd51ea07df 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static org.junit.Assert.assertEquals; @@ -29,166 +28,140 @@ public class PPLQueryDataAnonymizerTest { @Test public void testSearchCommand() { - assertEquals("source=t | where a = ***", - anonymize("search source=t a=1") - ); + assertEquals("source=t | where a = ***", anonymize("search source=t a=1")); } @Test public void testTableFunctionCommand() { - assertEquals("source=prometheus.query_range(***,***,***,***)", - anonymize("source=prometheus.query_range('afsd',123,123,3)") - ); + assertEquals( + "source=prometheus.query_range(***,***,***,***)", + anonymize("source=prometheus.query_range('afsd',123,123,3)")); } @Test public void testPrometheusPPLCommand() { - assertEquals("source=prometheus.http_requests_process", - anonymize("source=prometheus.http_requests_process") - ); + assertEquals( + "source=prometheus.http_requests_process", + anonymize("source=prometheus.http_requests_process")); } @Test public void testWhereCommand() { - assertEquals("source=t | where a = ***", - anonymize("search source=t | where a=1") - ); + assertEquals("source=t | where a = ***", anonymize("search source=t | where a=1")); } @Test public void testFieldsCommandWithoutArguments() { - assertEquals("source=t | fields + f,g", - anonymize("source=t | fields f,g")); + assertEquals("source=t | fields + f,g", anonymize("source=t | fields f,g")); } @Test public void testFieldsCommandWithIncludeArguments() { - assertEquals("source=t | fields + f,g", - anonymize("source=t | fields + f,g")); + assertEquals("source=t | fields + f,g", anonymize("source=t | fields + f,g")); } @Test public void testFieldsCommandWithExcludeArguments() { - assertEquals("source=t | fields - f,g", - anonymize("source=t | fields - f,g")); + assertEquals("source=t | fields - f,g", anonymize("source=t | fields - f,g")); } @Test 
public void testRenameCommandWithMultiFields() { - assertEquals("source=t | rename f as g,h as i,j as k", + assertEquals( + "source=t | rename f as g,h as i,j as k", anonymize("source=t | rename f as g,h as i,j as k")); } @Test public void testStatsCommandWithByClause() { - assertEquals("source=t | stats count(a) by b", - anonymize("source=t | stats count(a) by b")); + assertEquals("source=t | stats count(a) by b", anonymize("source=t | stats count(a) by b")); } @Test public void testStatsCommandWithNestedFunctions() { - assertEquals("source=t | stats sum(+(a,b))", - anonymize("source=t | stats sum(a+b)")); + assertEquals("source=t | stats sum(+(a,b))", anonymize("source=t | stats sum(a+b)")); } @Test public void testDedupCommand() { - assertEquals("source=t | dedup f1,f2 1 keepempty=false consecutive=false", + assertEquals( + "source=t | dedup f1,f2 1 keepempty=false consecutive=false", anonymize("source=t | dedup f1, f2")); } @Test public void testHeadCommandWithNumber() { - assertEquals("source=t | head 3", - anonymize("source=t | head 3")); + assertEquals("source=t | head 3", anonymize("source=t | head 3")); } - //todo, sort order is ignored, it doesn't impact the log analysis. + // todo, sort order is ignored, it doesn't impact the log analysis. @Test public void testSortCommandWithOptions() { - assertEquals("source=t | sort f1,f2", - anonymize("source=t | sort - f1, + f2")); + assertEquals("source=t | sort f1,f2", anonymize("source=t | sort - f1, + f2")); } @Test public void testEvalCommand() { - assertEquals("source=t | eval r=abs(f)", - anonymize("source=t | eval r=abs(f)")); + assertEquals("source=t | eval r=abs(f)", anonymize("source=t | eval r=abs(f)")); } @Test public void testRareCommandWithGroupBy() { - assertEquals("source=t | rare 10 a by b", - anonymize("source=t | rare a by b")); + assertEquals("source=t | rare 10 a by b", anonymize("source=t | rare a by b")); } @Test public void testTopCommandWithNAndGroupBy() { - assertEquals("source=t | top 1 a by b", - anonymize("source=t | top 1 a by b")); + assertEquals("source=t | top 1 a by b", anonymize("source=t | top 1 a by b")); } @Test public void testAndExpression() { - assertEquals("source=t | where a = *** and b = ***", - anonymize("source=t | where a=1 and b=2") - ); + assertEquals("source=t | where a = *** and b = ***", anonymize("source=t | where a=1 and b=2")); } @Test public void testOrExpression() { - assertEquals("source=t | where a = *** or b = ***", - anonymize("source=t | where a=1 or b=2") - ); + assertEquals("source=t | where a = *** or b = ***", anonymize("source=t | where a=1 or b=2")); } @Test public void testXorExpression() { - assertEquals("source=t | where a = *** xor b = ***", - anonymize("source=t | where a=1 xor b=2") - ); + assertEquals("source=t | where a = *** xor b = ***", anonymize("source=t | where a=1 xor b=2")); } @Test public void testNotExpression() { - assertEquals("source=t | where not a = ***", - anonymize("source=t | where not a=1 ") - ); + assertEquals("source=t | where not a = ***", anonymize("source=t | where not a=1 ")); } @Test public void testQualifiedName() { - assertEquals("source=t | fields + field0", - anonymize("source=t | fields field0") - ); + assertEquals("source=t | fields + field0", anonymize("source=t | fields field0")); } @Test public void testDateFunction() { - assertEquals("source=t | eval date=DATE_ADD(DATE(***),INTERVAL *** HOUR)", - anonymize("source=t | eval date=DATE_ADD(DATE('2020-08-26'),INTERVAL 1 HOUR)") - ); + assertEquals( + "source=t | eval 
date=DATE_ADD(DATE(***),INTERVAL *** HOUR)", + anonymize("source=t | eval date=DATE_ADD(DATE('2020-08-26'),INTERVAL 1 HOUR)")); } @Test public void testExplain() { - assertEquals("source=t | fields + a", - anonymizeStatement("source=t | fields a", true) - ); + assertEquals("source=t | fields + a", anonymizeStatement("source=t | fields a", true)); } @Test public void testQuery() { - assertEquals("source=t | fields + a", - anonymizeStatement("source=t | fields a", false) - ); + assertEquals("source=t | fields + a", anonymizeStatement("source=t | fields a", false)); } @Test public void anonymizeFieldsNoArg() { - assertEquals("source=t | fields + f", - anonymize(projectWithArg(relation("t"), Collections.emptyList(), field("f"))) - ); + assertEquals( + "source=t | fields + f", + anonymize(projectWithArg(relation("t"), Collections.emptyList(), field("f")))); } private String anonymize(String query) { diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java index d64c8d5db4..7c1264e0b6 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/prometheus/build.gradle b/prometheus/build.gradle index e98dfd83e4..0d915a6d4a 100644 --- a/prometheus/build.gradle +++ b/prometheus/build.gradle @@ -13,6 +13,9 @@ repositories { mavenCentral() } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + dependencies { api project(':core') implementation project(':datasources') diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java index 9472be7487..2bfaaccd47 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java @@ -38,16 +38,18 @@ public PrometheusClientImpl(OkHttpClient okHttpClient, URI uri) { this.uri = uri; } - @Override public JSONObject queryRange(String query, Long start, Long end, String step) throws IOException { - String queryUrl = String.format("%s/api/v1/query_range?query=%s&start=%s&end=%s&step=%s", - uri.toString().replaceAll("/$", ""), URLEncoder.encode(query, StandardCharsets.UTF_8), - start, end, step); + String queryUrl = + String.format( + "%s/api/v1/query_range?query=%s&start=%s&end=%s&step=%s", + uri.toString().replaceAll("/$", ""), + URLEncoder.encode(query, StandardCharsets.UTF_8), + start, + end, + step); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); return jsonObject.getJSONObject("data"); @@ -55,14 +57,14 @@ public JSONObject queryRange(String query, Long start, Long end, String step) th @Override public List getLabels(String metricName) throws IOException { - String queryUrl = String.format("%s/api/v1/labels?%s=%s", - uri.toString().replaceAll("/$", ""), - URLEncoder.encode("match[]", StandardCharsets.UTF_8), - URLEncoder.encode(metricName, StandardCharsets.UTF_8)); + 
String queryUrl = + String.format( + "%s/api/v1/labels?%s=%s", + uri.toString().replaceAll("/$", ""), + URLEncoder.encode("match[]", StandardCharsets.UTF_8), + URLEncoder.encode(metricName, StandardCharsets.UTF_8)); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); return toListOfLabels(jsonObject.getJSONArray("data")); @@ -70,28 +72,26 @@ public List getLabels(String metricName) throws IOException { @Override public Map> getAllMetrics() throws IOException { - String queryUrl = String.format("%s/api/v1/metadata", - uri.toString().replaceAll("/$", "")); + String queryUrl = String.format("%s/api/v1/metadata", uri.toString().replaceAll("/$", "")); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); - TypeReference>> typeRef - = new TypeReference<>() {}; + TypeReference>> typeRef = new TypeReference<>() {}; return new ObjectMapper().readValue(jsonObject.getJSONObject("data").toString(), typeRef); } @Override public JSONArray queryExemplars(String query, Long start, Long end) throws IOException { - String queryUrl = String.format("%s/api/v1/query_exemplars?query=%s&start=%s&end=%s", - uri.toString().replaceAll("/$", ""), URLEncoder.encode(query, StandardCharsets.UTF_8), - start, end); + String queryUrl = + String.format( + "%s/api/v1/query_exemplars?query=%s&start=%s&end=%s", + uri.toString().replaceAll("/$", ""), + URLEncoder.encode(query, StandardCharsets.UTF_8), + start, + end); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); return jsonObject.getJSONArray("data"); @@ -100,8 +100,8 @@ public JSONArray queryExemplars(String query, Long start, Long end) throws IOExc private List toListOfLabels(JSONArray array) { List result = new ArrayList<>(); for (int i = 0; i < array.length(); i++) { - //__name__ is internal label in prometheus representing the metric name. - //Exempting this from labels list as it is not required in any of the operations. + // __name__ is internal label in prometheus representing the metric name. + // Exempting this from labels list as it is not required in any of the operations. 
if (!"__name__".equals(array.optString(i))) { result.add(array.optString(i)); } @@ -109,7 +109,6 @@ private List toListOfLabels(JSONArray array) { return result; } - private JSONObject readResponse(Response response) throws IOException { if (response.isSuccessful()) { JSONObject jsonObject = new JSONObject(Objects.requireNonNull(response.body()).string()); @@ -120,10 +119,9 @@ private JSONObject readResponse(Response response) throws IOException { } } else { throw new RuntimeException( - String.format("Request to Prometheus is Unsuccessful with : %s", Objects.requireNonNull( - response.body(), "Response body can't be null").string())); + String.format( + "Request to Prometheus is Unsuccessful with : %s", + Objects.requireNonNull(response.body(), "Response body can't be null").string())); } } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java index 88e9df6a88..0f687b3cd1 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java @@ -18,6 +18,6 @@ public class PrometheusFieldConstants { public static final String EXEMPLARS_KEY = "exemplars"; public static final String TRACE_ID_KEY = "traceID"; public static final String LABELS_KEY = "labels"; - public static final String TIMESTAMP_KEY = "timestamp"; - public static final String VALUE_KEY = "value"; + public static final String TIMESTAMP_KEY = "timestamp"; + public static final String VALUE_KEY = "value"; } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java index 9d455b3cfc..bbd3a36f5f 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java @@ -28,8 +28,8 @@ import org.opensearch.sql.prometheus.storage.QueryExemplarsTable; import org.opensearch.sql.storage.Table; -public class QueryExemplarFunctionImplementation extends FunctionExpression implements - TableFunctionImplementation { +public class QueryExemplarFunctionImplementation extends FunctionExpression + implements TableFunctionImplementation { private final FunctionName functionName; private final List arguments; @@ -39,10 +39,10 @@ public class QueryExemplarFunctionImplementation extends FunctionExpression impl * Required argument constructor. 
* * @param functionName name of the function - * @param arguments a list of arguments provided + * @param arguments a list of arguments provided */ - public QueryExemplarFunctionImplementation(FunctionName functionName, List arguments, - PrometheusClient prometheusClient) { + public QueryExemplarFunctionImplementation( + FunctionName functionName, List arguments, PrometheusClient prometheusClient) { super(functionName, arguments); this.functionName = functionName; this.arguments = arguments; @@ -51,10 +51,11 @@ public QueryExemplarFunctionImplementation(FunctionName functionName, List valueEnv) { - throw new UnsupportedOperationException(String.format( - "Prometheus defined function [%s] is only " - + "supported in SOURCE clause with prometheus connector catalog", - functionName)); + throw new UnsupportedOperationException( + String.format( + "Prometheus defined function [%s] is only " + + "supported in SOURCE clause with prometheus connector catalog", + functionName)); } @Override @@ -64,10 +65,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", ((NamedArgumentExpression) arg) - .getArgName(), ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } @@ -79,27 +85,26 @@ public Table applyArguments() { private PrometheusQueryExemplarsRequest buildExemplarsQueryRequest(List arguments) { PrometheusQueryExemplarsRequest request = new PrometheusQueryExemplarsRequest(); - arguments.forEach(arg -> { - String argName = ((NamedArgumentExpression) arg).getArgName(); - Expression argValue = ((NamedArgumentExpression) arg).getValue(); - ExprValue literalValue = argValue.valueOf(); - switch (argName) { - case QUERY: - request - .setQuery((String) literalValue.value()); - break; - case STARTTIME: - request.setStartTime(((Number) literalValue.value()).longValue()); - break; - case ENDTIME: - request.setEndTime(((Number) literalValue.value()).longValue()); - break; - default: - throw new ExpressionEvaluationException( - String.format("Invalid Function Argument:%s", argName)); - } - }); + arguments.forEach( + arg -> { + String argName = ((NamedArgumentExpression) arg).getArgName(); + Expression argValue = ((NamedArgumentExpression) arg).getValue(); + ExprValue literalValue = argValue.valueOf(); + switch (argName) { + case QUERY: + request.setQuery((String) literalValue.value()); + break; + case STARTTIME: + request.setStartTime(((Number) literalValue.value()).longValue()); + break; + case ENDTIME: + request.setEndTime(((Number) literalValue.value()).longValue()); + break; + default: + throw new ExpressionEvaluationException( + String.format("Invalid Function Argument:%s", argName)); + } + }); return request; } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java index 2d3710037a..0719bd1525 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java +++ 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java @@ -29,8 +29,8 @@ import org.opensearch.sql.prometheus.storage.PrometheusMetricTable; import org.opensearch.sql.storage.Table; -public class QueryRangeFunctionImplementation extends FunctionExpression implements - TableFunctionImplementation { +public class QueryRangeFunctionImplementation extends FunctionExpression + implements TableFunctionImplementation { private final FunctionName functionName; private final List arguments; @@ -40,10 +40,10 @@ public class QueryRangeFunctionImplementation extends FunctionExpression impleme * Required argument constructor. * * @param functionName name of the function - * @param arguments a list of expressions + * @param arguments a list of expressions */ - public QueryRangeFunctionImplementation(FunctionName functionName, List arguments, - PrometheusClient prometheusClient) { + public QueryRangeFunctionImplementation( + FunctionName functionName, List arguments, PrometheusClient prometheusClient) { super(functionName, arguments); this.functionName = functionName; this.arguments = arguments; @@ -52,10 +52,11 @@ public QueryRangeFunctionImplementation(FunctionName functionName, List valueEnv) { - throw new UnsupportedOperationException(String.format( - "Prometheus defined function [%s] is only " - + "supported in SOURCE clause with prometheus connector catalog", - functionName)); + throw new UnsupportedOperationException( + String.format( + "Prometheus defined function [%s] is only " + + "supported in SOURCE clause with prometheus connector catalog", + functionName)); } @Override @@ -65,10 +66,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", ((NamedArgumentExpression) arg) - .getArgName(), ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } @@ -80,30 +86,29 @@ public Table applyArguments() { private PrometheusQueryRequest buildQueryFromQueryRangeFunction(List arguments) { PrometheusQueryRequest prometheusQueryRequest = new PrometheusQueryRequest(); - arguments.forEach(arg -> { - String argName = ((NamedArgumentExpression) arg).getArgName(); - Expression argValue = ((NamedArgumentExpression) arg).getValue(); - ExprValue literalValue = argValue.valueOf(); - switch (argName) { - case QUERY: - prometheusQueryRequest - .setPromQl((String) literalValue.value()); - break; - case STARTTIME: - prometheusQueryRequest.setStartTime(((Number) literalValue.value()).longValue()); - break; - case ENDTIME: - prometheusQueryRequest.setEndTime(((Number) literalValue.value()).longValue()); - break; - case STEP: - prometheusQueryRequest.setStep(literalValue.value().toString()); - break; - default: - throw new ExpressionEvaluationException( - String.format("Invalid Function Argument:%s", argName)); - } - }); + arguments.forEach( + arg -> { + String argName = ((NamedArgumentExpression) arg).getArgName(); + Expression argValue = ((NamedArgumentExpression) arg).getValue(); + ExprValue literalValue = argValue.valueOf(); + switch (argName) { + case QUERY: + prometheusQueryRequest.setPromQl((String) literalValue.value()); + break; + case STARTTIME: + 
prometheusQueryRequest.setStartTime(((Number) literalValue.value()).longValue()); + break; + case ENDTIME: + prometheusQueryRequest.setEndTime(((Number) literalValue.value()).longValue()); + break; + case STEP: + prometheusQueryRequest.setStep(literalValue.value().toString()); + break; + default: + throw new ExpressionEvaluationException( + String.format("Invalid Function Argument:%s", argName)); + } + }); return prometheusQueryRequest; } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java index a82e5a397a..78d87b0a0b 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java @@ -22,9 +22,9 @@ import org.opensearch.sql.prometheus.functions.implementation.QueryExemplarFunctionImplementation; /** - * This class is for query_exemplars table function resolver {@link FunctionResolver}. - * It takes care of validating function arguments and also creating - * required {@link org.opensearch.sql.expression.function.TableFunctionImplementation} Class. + * This class is for query_exemplars table function resolver {@link FunctionResolver}. It takes care + * of validating function arguments and also creating required {@link + * org.opensearch.sql.expression.function.TableFunctionImplementation} Class. */ @RequiredArgsConstructor public class QueryExemplarsTableFunctionResolver implements FunctionResolver { @@ -41,13 +41,15 @@ public Pair resolve(FunctionSignature unreso final FunctionName functionName = FunctionName.of(QUERY_EXEMPLARS); FunctionSignature functionSignature = new FunctionSignature(FunctionName.of(QUERY_EXEMPLARS), List.of(STRING, LONG, LONG)); - FunctionBuilder functionBuilder = (functionProperties, arguments) -> { - final List argumentNames = List.of(QUERY, STARTTIME, ENDTIME); - validatePrometheusTableFunctionArguments(arguments, argumentNames); - List namedArguments = getNamedArgumentsOfTableFunction(arguments, argumentNames); - return new QueryExemplarFunctionImplementation(functionName, - namedArguments, prometheusClient); - }; + FunctionBuilder functionBuilder = + (functionProperties, arguments) -> { + final List argumentNames = List.of(QUERY, STARTTIME, ENDTIME); + validatePrometheusTableFunctionArguments(arguments, argumentNames); + List namedArguments = + getNamedArgumentsOfTableFunction(arguments, argumentNames); + return new QueryExemplarFunctionImplementation( + functionName, namedArguments, prometheusClient); + }; return Pair.of(functionSignature, functionBuilder); } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java index 8bb2a2d758..8dfa12134e 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java @@ -39,11 +39,14 @@ public Pair resolve(FunctionSignature unreso FunctionSignature functionSignature = new FunctionSignature(functionName, List.of(STRING, LONG, LONG, STRING)); final List argumentNames = List.of(QUERY, STARTTIME, 
ENDTIME, STEP); - FunctionBuilder functionBuilder = (functionProperties, arguments) -> { - validatePrometheusTableFunctionArguments(arguments, argumentNames); - List namedArguments = getNamedArgumentsOfTableFunction(arguments, argumentNames); - return new QueryRangeFunctionImplementation(functionName, namedArguments, prometheusClient); - }; + FunctionBuilder functionBuilder = + (functionProperties, arguments) -> { + validatePrometheusTableFunctionArguments(arguments, argumentNames); + List namedArguments = + getNamedArgumentsOfTableFunction(arguments, argumentNames); + return new QueryRangeFunctionImplementation( + functionName, namedArguments, prometheusClient); + }; return Pair.of(functionSignature, functionBuilder); } @@ -51,5 +54,4 @@ public Pair resolve(FunctionSignature unreso public FunctionName getFunctionName() { return FunctionName.of(QUERY_RANGE); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java index f2cefa85ec..bbc0516df6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java @@ -8,14 +8,10 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Handle Prometheus response. - */ +/** Handle Prometheus response. */ public interface PrometheusFunctionResponseHandle { - /** - * Return true if Prometheus response has more result. - */ + /** Return true if Prometheus response has more result. */ boolean hasNext(); /** @@ -24,8 +20,6 @@ public interface PrometheusFunctionResponseHandle { */ ExprValue next(); - /** - * Return ExecutionEngine.Schema of the Prometheus response. - */ + /** Return ExecutionEngine.Schema of the Prometheus response. 
*/ ExecutionEngine.Schema schema(); } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java index f030ce8f7a..8d1c267a90 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java @@ -44,8 +44,8 @@ public QueryExemplarsFunctionResponseHandle(JSONArray responseArray) { private void constructIteratorAndSchema(JSONArray responseArray) { List columnList = new ArrayList<>(); columnList.add(new ExecutionEngine.Schema.Column(SERIES_LABELS_KEY, SERIES_LABELS_KEY, STRUCT)); - columnList.add(new ExecutionEngine.Schema.Column(EXEMPLARS_KEY, EXEMPLARS_KEY, - ExprCoreType.ARRAY)); + columnList.add( + new ExecutionEngine.Schema.Column(EXEMPLARS_KEY, EXEMPLARS_KEY, ExprCoreType.ARRAY)); this.schema = new ExecutionEngine.Schema(columnList); List result = new ArrayList<>(); for (int i = 0; i < responseArray.length(); i++) { @@ -62,7 +62,8 @@ private void constructIteratorAndSchema(JSONArray responseArray) { private ExprValue constructSeriesLabels(JSONObject seriesLabels) { LinkedHashMap seriesLabelsMap = new LinkedHashMap<>(); - seriesLabels.keySet() + seriesLabels + .keySet() .forEach(key -> seriesLabelsMap.put(key, new ExprStringValue(seriesLabels.getString(key)))); return new ExprTupleValue(seriesLabelsMap); } @@ -78,13 +79,13 @@ private ExprValue constructExemplarList(JSONArray exemplars) { private ExprValue constructExemplar(JSONObject exemplarsJSONObject) { LinkedHashMap exemplarHashMap = new LinkedHashMap<>(); - exemplarHashMap.put(LABELS_KEY, - constructLabelsInExemplar(exemplarsJSONObject.getJSONObject(LABELS_KEY))); - exemplarHashMap.put(TIMESTAMP_KEY, - new ExprTimestampValue(Instant.ofEpochMilli((long)( - exemplarsJSONObject.getDouble(TIMESTAMP_KEY) * 1000)))); - exemplarHashMap.put(VALUE_KEY, - new ExprDoubleValue(exemplarsJSONObject.getDouble(VALUE_KEY))); + exemplarHashMap.put( + LABELS_KEY, constructLabelsInExemplar(exemplarsJSONObject.getJSONObject(LABELS_KEY))); + exemplarHashMap.put( + TIMESTAMP_KEY, + new ExprTimestampValue( + Instant.ofEpochMilli((long) (exemplarsJSONObject.getDouble(TIMESTAMP_KEY) * 1000)))); + exemplarHashMap.put(VALUE_KEY, new ExprDoubleValue(exemplarsJSONObject.getDouble(VALUE_KEY))); return new ExprTupleValue(exemplarHashMap); } @@ -106,7 +107,6 @@ public ExprValue next() { return responseIterator.next(); } - @Override public ExecutionEngine.Schema schema() { return schema; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java index a3c68617e8..e10c9d7aff 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java @@ -30,9 +30,7 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Default implementation of QueryRangeFunctionResponseHandle. - */ +/** Default implementation of QueryRangeFunctionResponseHandle. 
*/ public class QueryRangeFunctionResponseHandle implements PrometheusFunctionResponseHandle { private final JSONObject responseObject; @@ -62,25 +60,26 @@ private void constructIterator() { result.add(new ExprTupleValue(linkedHashMap)); } } else { - throw new RuntimeException(String.format("Unexpected Result Type: %s during Prometheus " - + "Response Parsing. 'matrix' resultType is expected", - responseObject.getString("resultType"))); + throw new RuntimeException( + String.format( + "Unexpected Result Type: %s during Prometheus " + + "Response Parsing. 'matrix' resultType is expected", + responseObject.getString("resultType"))); } this.responseIterator = result.iterator(); } - private static void extractTimestampAndValues(JSONArray values, - LinkedHashMap linkedHashMap) { + private static void extractTimestampAndValues( + JSONArray values, LinkedHashMap linkedHashMap) { List timestampList = new ArrayList<>(); List valueList = new ArrayList<>(); for (int j = 0; j < values.length(); j++) { JSONArray value = values.getJSONArray(j); - timestampList.add(new ExprTimestampValue( - Instant.ofEpochMilli((long) (value.getDouble(0) * 1000)))); + timestampList.add( + new ExprTimestampValue(Instant.ofEpochMilli((long) (value.getDouble(0) * 1000)))); valueList.add(new ExprDoubleValue(value.getDouble(1))); } - linkedHashMap.put(TIMESTAMP, - new ExprCollectionValue(timestampList)); + linkedHashMap.put(TIMESTAMP, new ExprCollectionValue(timestampList)); linkedHashMap.put(VALUE, new ExprCollectionValue(valueList)); } @@ -90,12 +89,10 @@ private void constructSchema() { private ExprValue extractLabels(JSONObject metric) { LinkedHashMap labelsMap = new LinkedHashMap<>(); - metric.keySet().forEach(key - -> labelsMap.put(key, new ExprStringValue(metric.getString(key)))); + metric.keySet().forEach(key -> labelsMap.put(key, new ExprStringValue(metric.getString(key)))); return new ExprTupleValue(labelsMap); } - private List getColumnList() { List columnList = new ArrayList<>(); columnList.add(new ExecutionEngine.Schema.Column(LABELS, LABELS, ExprCoreType.STRUCT)); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java index 8364173889..7e779eb77c 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java @@ -12,9 +12,7 @@ import org.opensearch.sql.storage.TableScanOperator; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * TableScanBuilder for query_exemplars table function of prometheus connector. - */ +/** TableScanBuilder for query_exemplars table function of prometheus connector. 
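(As an aside, both response handles above turn Prometheus timestamps, which arrive as fractional epoch seconds, into ExprTimestampValue by converting to epoch milliseconds first. A tiny illustration of that conversion with a made-up sample value:

import java.time.Instant;

public class TimestampConversionSketch {
  public static void main(String[] args) {
    // Prometheus reports timestamps as fractional epoch seconds in the JSON payload.
    double prometheusTimestamp = 1686694425.5; // hypothetical sample value
    // Multiply by 1000 and truncate to epoch milliseconds, as the handles do.
    Instant instant = Instant.ofEpochMilli((long) (prometheusTimestamp * 1000));
    System.out.println(instant); // 2023-06-13T22:13:45.500Z
  }
}
)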
*/ @AllArgsConstructor public class QueryExemplarsFunctionTableScanBuilder extends TableScanBuilder { @@ -24,8 +22,8 @@ public class QueryExemplarsFunctionTableScanBuilder extends TableScanBuilder { @Override public TableScanOperator build() { - return new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + return new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); } // Since we are determining the schema after table scan, @@ -34,5 +32,4 @@ public TableScanOperator build() { public boolean pushDownProject(LogicalProject project) { return true; } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java index 85ba6c854a..1a58429328 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java @@ -22,37 +22,37 @@ import org.opensearch.sql.storage.TableScanOperator; /** - * This class is for QueryExemplars function {@link TableScanOperator}. - * This takes care of getting exemplar data from prometheus by making - * {@link PrometheusQueryExemplarsRequest}. + * This class is for QueryExemplars function {@link TableScanOperator}. This takes care of getting + * exemplar data from prometheus by making {@link PrometheusQueryExemplarsRequest}. */ @RequiredArgsConstructor public class QueryExemplarsFunctionTableScanOperator extends TableScanOperator { private final PrometheusClient prometheusClient; - @Getter - private final PrometheusQueryExemplarsRequest request; + @Getter private final PrometheusQueryExemplarsRequest request; private QueryExemplarsFunctionResponseHandle queryExemplarsFunctionResponseHandle; private static final Logger LOG = LogManager.getLogger(); @Override public void open() { super.open(); - this.queryExemplarsFunctionResponseHandle - = AccessController - .doPrivileged((PrivilegedAction) () -> { - try { - JSONArray responseArray = prometheusClient.queryExemplars( - request.getQuery(), - request.getStartTime(), request.getEndTime()); - return new QueryExemplarsFunctionResponseHandle(responseArray); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException( - String.format("Error fetching data from prometheus server: %s", e.getMessage())); - } - }); + this.queryExemplarsFunctionResponseHandle = + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + JSONArray responseArray = + prometheusClient.queryExemplars( + request.getQuery(), request.getStartTime(), request.getEndTime()); + return new QueryExemplarsFunctionResponseHandle(responseArray); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + String.format( + "Error fetching data from prometheus server: %s", e.getMessage())); + } + }); } @Override @@ -72,7 +72,9 @@ public ExprValue next() { @Override public String explain() { - return String.format(Locale.ROOT, "query_exemplars(%s, %s, %s)", + return String.format( + Locale.ROOT, + "query_exemplars(%s, %s, %s)", request.getQuery(), request.getStartTime(), request.getEndTime()); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java index 00e2191d09..2d22c0af69 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java @@ -15,9 +15,8 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * TableScanBuilder for query_range table function of prometheus connector. - * we can merge this when we refactor for existing - * ppl queries based on prometheus connector. + * TableScanBuilder for query_range table function of prometheus connector. we can merge this when + * we refactor for existing ppl queries based on prometheus connector. */ @AllArgsConstructor public class QueryRangeFunctionTableScanBuilder extends TableScanBuilder { diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java index 68b9b60643..fc3f9f9a9b 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java @@ -23,9 +23,7 @@ import org.opensearch.sql.prometheus.request.PrometheusQueryRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * This a table scan operator to handle Query Range table function. - */ +/** This a table scan operator to handle Query Range table function. */ @RequiredArgsConstructor public class QueryRangeFunctionTableScanOperator extends TableScanOperator { @@ -39,19 +37,25 @@ public class QueryRangeFunctionTableScanOperator extends TableScanOperator { @Override public void open() { super.open(); - this.prometheusResponseHandle - = AccessController.doPrivileged((PrivilegedAction) () -> { - try { - JSONObject responseObject = prometheusClient.queryRange( - request.getPromQl(), - request.getStartTime(), request.getEndTime(), request.getStep()); - return new QueryRangeFunctionResponseHandle(responseObject); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException( - String.format("Error fetching data from prometheus server: %s", e.getMessage())); - } - }); + this.prometheusResponseHandle = + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + JSONObject responseObject = + prometheusClient.queryRange( + request.getPromQl(), + request.getStartTime(), + request.getEndTime(), + request.getStep()); + return new QueryRangeFunctionResponseHandle(responseObject); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + String.format( + "Error fetching data from prometheus server: %s", e.getMessage())); + } + }); } @Override @@ -71,7 +75,9 @@ public ExprValue next() { @Override public String explain() { - return String.format(Locale.ROOT, "query_range(%s, %s, %s, %s)", + return String.format( + Locale.ROOT, + "query_range(%s, %s, %s, %s)", request.getPromQl(), request.getStartTime(), request.getEndTime(), diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java index f348c699a1..f7c45f6ad2 100644 --- 
a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java @@ -20,10 +20,7 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; - -/** - * Logical Metric Scan along with aggregation Operation. - */ +/** Logical Metric Scan along with aggregation Operation. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) @@ -31,37 +28,29 @@ public class PrometheusLogicalMetricAgg extends LogicalPlan { private final String metricName; - /** - * Filter Condition. - */ - @Setter - private Expression filter; + /** Filter Condition. */ + @Setter private Expression filter; - /** - * Aggregation List. - */ - @Setter - private List aggregatorList; + /** Aggregation List. */ + @Setter private List aggregatorList; - /** - * Group List. - */ - @Setter - private List groupByList; + /** Group List. */ + @Setter private List groupByList; /** * Constructor for LogicalMetricAgg Logical Plan. * - * @param metricName metricName - * @param filter filter + * @param metricName metricName + * @param filter filter * @param aggregatorList aggregatorList - * @param groupByList groupByList. + * @param groupByList groupByList. */ @Builder - public PrometheusLogicalMetricAgg(String metricName, - Expression filter, - List aggregatorList, - List groupByList) { + public PrometheusLogicalMetricAgg( + String metricName, + Expression filter, + List aggregatorList, + List groupByList) { super(ImmutableList.of()); this.metricName = metricName; this.filter = filter; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java index 5e07d6899f..7b28a8a6c9 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java @@ -17,8 +17,8 @@ import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; /** - * Prometheus Logical Metric Scan Operation. - * In an optimized plan this node represents both Relation and Filter Operation. + * Prometheus Logical Metric Scan Operation. In an optimized plan this node represents both Relation + * and Filter Operation. */ @Getter @ToString @@ -27,9 +27,7 @@ public class PrometheusLogicalMetricScan extends LogicalPlan { private final String metricName; - /** - * Filter Condition. - */ + /** Filter Condition. */ private final Expression filter; /** @@ -39,8 +37,7 @@ public class PrometheusLogicalMetricScan extends LogicalPlan { * @param filter filter. 
*/ @Builder - public PrometheusLogicalMetricScan(String metricName, - Expression filter) { + public PrometheusLogicalMetricScan(String metricName, Expression filter) { super(ImmutableList.of()); this.metricName = metricName; this.filter = filter; @@ -50,5 +47,4 @@ public PrometheusLogicalMetricScan(String metricName, public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitNode(this, context); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java index 8a365b2786..ea14be0e0a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java @@ -5,7 +5,6 @@ package org.opensearch.sql.prometheus.planner.logical; - import java.util.Arrays; import lombok.experimental.UtilityClass; import org.opensearch.sql.planner.optimizer.LogicalPlanOptimizer; @@ -13,20 +12,14 @@ import org.opensearch.sql.prometheus.planner.logical.rules.MergeAggAndRelation; import org.opensearch.sql.prometheus.planner.logical.rules.MergeFilterAndRelation; -/** - * Prometheus storage engine specified logical plan optimizer. - */ +/** Prometheus storage engine specified logical plan optimizer. */ @UtilityClass public class PrometheusLogicalPlanOptimizerFactory { - /** - * Create Prometheus storage specified logical plan optimizer. - */ + /** Create Prometheus storage specified logical plan optimizer. */ public static LogicalPlanOptimizer create() { - return new LogicalPlanOptimizer(Arrays.asList( - new MergeFilterAndRelation(), - new MergeAggAndIndexScan(), - new MergeAggAndRelation() - )); + return new LogicalPlanOptimizer( + Arrays.asList( + new MergeFilterAndRelation(), new MergeAggAndIndexScan(), new MergeAggAndRelation())); } } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java index 76bc6cc840..2594b74eb5 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.planner.logical.rules; import static com.facebook.presto.matching.Pattern.typeOf; @@ -20,9 +19,7 @@ import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricAgg; import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricScan; -/** - * Merge Aggregation -- Relation to MetricScanAggregation. - */ +/** Merge Aggregation -- Relation to MetricScanAggregation. */ public class MergeAggAndIndexScan implements Rule { private final Capture capture; @@ -31,22 +28,18 @@ public class MergeAggAndIndexScan implements Rule { @Getter private final Pattern pattern; - /** - * Constructor of MergeAggAndIndexScan. - */ + /** Constructor of MergeAggAndIndexScan. 
*/ public MergeAggAndIndexScan() { this.capture = Capture.newCapture(); - this.pattern = typeOf(LogicalAggregation.class) - .with(source().matching(typeOf(PrometheusLogicalMetricScan.class) - .capturedAs(capture))); + this.pattern = + typeOf(LogicalAggregation.class) + .with(source().matching(typeOf(PrometheusLogicalMetricScan.class).capturedAs(capture))); } @Override - public LogicalPlan apply(LogicalAggregation aggregation, - Captures captures) { + public LogicalPlan apply(LogicalAggregation aggregation, Captures captures) { PrometheusLogicalMetricScan indexScan = captures.get(capture); - return PrometheusLogicalMetricAgg - .builder() + return PrometheusLogicalMetricAgg.builder() .metricName(indexScan.getMetricName()) .filter(indexScan.getFilter()) .aggregatorList(aggregation.getAggregatorList()) diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java index fa9b0c7206..e6170e41f9 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.planner.logical.rules; import static com.facebook.presto.matching.Pattern.typeOf; @@ -20,9 +19,7 @@ import org.opensearch.sql.planner.optimizer.Rule; import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricAgg; -/** - * Merge Aggregation -- Relation to IndexScanAggregation. - */ +/** Merge Aggregation -- Relation to IndexScanAggregation. */ public class MergeAggAndRelation implements Rule { private final Capture relationCapture; @@ -31,21 +28,18 @@ public class MergeAggAndRelation implements Rule { @Getter private final Pattern pattern; - /** - * Constructor of MergeAggAndRelation. - */ + /** Constructor of MergeAggAndRelation. 
*/ public MergeAggAndRelation() { this.relationCapture = Capture.newCapture(); - this.pattern = typeOf(LogicalAggregation.class) - .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); + this.pattern = + typeOf(LogicalAggregation.class) + .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); } @Override - public LogicalPlan apply(LogicalAggregation aggregation, - Captures captures) { + public LogicalPlan apply(LogicalAggregation aggregation, Captures captures) { LogicalRelation relation = captures.get(relationCapture); - return PrometheusLogicalMetricAgg - .builder() + return PrometheusLogicalMetricAgg.builder() .metricName(relation.getRelationName()) .aggregatorList(aggregation.getAggregatorList()) .groupByList(aggregation.getGroupByList()) diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java index a99eb695be..2013938d73 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.planner.logical.rules; import static com.facebook.presto.matching.Pattern.typeOf; @@ -18,21 +17,18 @@ import org.opensearch.sql.planner.optimizer.Rule; import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricScan; -/** - * Merge Filter -- Relation to LogicalMetricScan. - */ +/** Merge Filter -- Relation to LogicalMetricScan. */ public class MergeFilterAndRelation implements Rule { private final Capture relationCapture; private final Pattern pattern; - /** - * Constructor of MergeFilterAndRelation. - */ + /** Constructor of MergeFilterAndRelation. */ public MergeFilterAndRelation() { this.relationCapture = Capture.newCapture(); - this.pattern = typeOf(LogicalFilter.class) - .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); + this.pattern = + typeOf(LogicalFilter.class) + .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); } @Override @@ -41,11 +37,9 @@ public Pattern pattern() { } @Override - public LogicalPlan apply(LogicalFilter filter, - Captures captures) { + public LogicalPlan apply(LogicalFilter filter, Captures captures) { LogicalRelation relation = captures.get(relationCapture); - return PrometheusLogicalMetricScan - .builder() + return PrometheusLogicalMetricScan.builder() .metricName(relation.getRelationName()) .filter(filter.getCondition()) .build(); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java index 9cf3d41522..d4eea97c48 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.request; import lombok.AllArgsConstructor; @@ -12,9 +11,7 @@ import lombok.NoArgsConstructor; import lombok.ToString; -/** - * Prometheus metric query request. - */ +/** Prometheus metric query request. 
*/ @EqualsAndHashCode @Data @ToString @@ -22,19 +19,12 @@ @NoArgsConstructor public class PrometheusQueryExemplarsRequest { - /** - * PromQL. - */ + /** PromQL. */ private String query; - /** - * startTime of the query. - */ + /** startTime of the query. */ private Long startTime; - /** - * endTime of the query. - */ + /** endTime of the query. */ private Long endTime; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java index d287ea4d65..e24c27c52a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.request; import lombok.AllArgsConstructor; @@ -12,9 +11,7 @@ import lombok.NoArgsConstructor; import lombok.ToString; -/** - * Prometheus metric query request. - */ +/** Prometheus metric query request. */ @EqualsAndHashCode @Data @ToString @@ -22,24 +19,15 @@ @NoArgsConstructor public class PrometheusQueryRequest { - /** - * PromQL. - */ + /** PromQL. */ private String promQl; - /** - * startTime of the query. - */ + /** startTime of the query. */ private Long startTime; - /** - * endTime of the query. - */ + /** endTime of the query. */ private Long endTime; - /** - * step is the resolution required between startTime and endTime. - */ + /** step is the resolution required between startTime and endTime. */ private String step; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java index 2e0d46b3e8..b6a4e3c49c 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java @@ -5,7 +5,6 @@ * */ - package org.opensearch.sql.prometheus.request.system; import static org.opensearch.sql.data.model.ExprValueUtils.stringValue; @@ -31,60 +30,63 @@ import org.opensearch.sql.prometheus.storage.PrometheusMetricDefaultSchema; /** - * Describe Metric metadata request. - * This is triggered in case of both query range table function and relation. - * In case of table function metric name is null. + * Describe Metric metadata request. This is triggered in case of both query range table function + * and relation. In case of table function metric name is null. */ @ToString(onlyExplicitlyIncluded = true) public class PrometheusDescribeMetricRequest implements PrometheusSystemRequest { private final PrometheusClient prometheusClient; - @ToString.Include - private final String metricName; + @ToString.Include private final String metricName; private final DataSourceSchemaName dataSourceSchemaName; private static final Logger LOG = LogManager.getLogger(); /** - * Constructor for Prometheus Describe Metric Request. - * In case of pass through queries like query_range function, - * metric names are optional. + * Constructor for Prometheus Describe Metric Request. In case of pass through queries like + * query_range function, metric names are optional. * - * @param prometheusClient prometheusClient. + * @param prometheusClient prometheusClient. 
* @param dataSourceSchemaName dataSourceSchemaName. - * @param metricName metricName. + * @param metricName metricName. */ - public PrometheusDescribeMetricRequest(PrometheusClient prometheusClient, - DataSourceSchemaName dataSourceSchemaName, - @NonNull String metricName) { + public PrometheusDescribeMetricRequest( + PrometheusClient prometheusClient, + DataSourceSchemaName dataSourceSchemaName, + @NonNull String metricName) { this.prometheusClient = prometheusClient; this.metricName = metricName; this.dataSourceSchemaName = dataSourceSchemaName; } - /** - * Get the mapping of field and type. - * Returns labels and default schema fields. + * Get the mapping of field and type. Returns labels and default schema fields. * * @return mapping of field and type. */ public Map getFieldTypes() { Map fieldTypes = new HashMap<>(); - AccessController.doPrivileged((PrivilegedAction>) () -> { - try { - prometheusClient.getLabels(metricName) - .forEach(label -> fieldTypes.put(label, ExprCoreType.STRING)); - } catch (IOException e) { - LOG.error("Error while fetching labels for {} from prometheus: {}", - metricName, e.getMessage()); - throw new RuntimeException(String.format("Error while fetching labels " - + "for %s from prometheus: %s", metricName, e.getMessage())); - } - return null; - }); + AccessController.doPrivileged( + (PrivilegedAction>) + () -> { + try { + prometheusClient + .getLabels(metricName) + .forEach(label -> fieldTypes.put(label, ExprCoreType.STRING)); + } catch (IOException e) { + LOG.error( + "Error while fetching labels for {} from prometheus: {}", + metricName, + e.getMessage()); + throw new RuntimeException( + String.format( + "Error while fetching labels " + "for %s from prometheus: %s", + metricName, e.getMessage())); + } + return null; + }); fieldTypes.putAll(PrometheusMetricDefaultSchema.DEFAULT_MAPPING.getMapping()); return fieldTypes; } @@ -93,14 +95,17 @@ public Map getFieldTypes() { public List search() { List results = new ArrayList<>(); for (Map.Entry entry : getFieldTypes().entrySet()) { - results.add(row(entry.getKey(), entry.getValue().legacyTypeName().toLowerCase(), - dataSourceSchemaName)); + results.add( + row( + entry.getKey(), + entry.getValue().legacyTypeName().toLowerCase(), + dataSourceSchemaName)); } return results; } - private ExprTupleValue row(String fieldName, String fieldType, - DataSourceSchemaName dataSourceSchemaName) { + private ExprTupleValue row( + String fieldName, String fieldType, DataSourceSchemaName dataSourceSchemaName) { LinkedHashMap valueMap = new LinkedHashMap<>(); valueMap.put("TABLE_CATALOG", stringValue(dataSourceSchemaName.getDataSourceName())); valueMap.put("TABLE_SCHEMA", stringValue(dataSourceSchemaName.getSchemaName())); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java index f5d2a44340..0e6c2bb2c6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java @@ -34,28 +34,33 @@ public class PrometheusListMetricsRequest implements PrometheusSystemRequest { private static final Logger LOG = LogManager.getLogger(); - @Override public List search() { - return AccessController.doPrivileged((PrivilegedAction>) () -> { - try { - Map> result = prometheusClient.getAllMetrics(); - return result.keySet() - 
.stream() - .map(x -> { - MetricMetadata metricMetadata = result.get(x).get(0); - return row(x, metricMetadata.getType(), - metricMetadata.getUnit(), metricMetadata.getHelp()); - }) - .collect(Collectors.toList()); - } catch (IOException e) { - LOG.error("Error while fetching metric list for from prometheus: {}", - e.getMessage()); - throw new RuntimeException(String.format("Error while fetching metric list " - + "for from prometheus: %s", e.getMessage())); - } - }); - + return AccessController.doPrivileged( + (PrivilegedAction>) + () -> { + try { + Map> result = prometheusClient.getAllMetrics(); + return result.keySet().stream() + .map( + x -> { + MetricMetadata metricMetadata = result.get(x).get(0); + return row( + x, + metricMetadata.getType(), + metricMetadata.getUnit(), + metricMetadata.getHelp()); + }) + .collect(Collectors.toList()); + } catch (IOException e) { + LOG.error( + "Error while fetching metric list for from prometheus: {}", e.getMessage()); + throw new RuntimeException( + String.format( + "Error while fetching metric list " + "for from prometheus: %s", + e.getMessage())); + } + }); } private ExprTupleValue row(String metricName, String tableType, String unit, String help) { diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java index e68ad22c30..6972a9390c 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java @@ -10,9 +10,7 @@ import java.util.List; import org.opensearch.sql.data.model.ExprValue; -/** - * Prometheus system request query to get metadata Info. - */ +/** Prometheus system request query to get metadata Info. */ public interface PrometheusSystemRequest { /** @@ -21,5 +19,4 @@ public interface PrometheusSystemRequest { * @return list of ExprValue. */ List search(); - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java index ca250125e6..339d882f5a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java @@ -42,13 +42,12 @@ public class PrometheusResponse implements Iterable { /** * Constructor. * - * @param responseObject Prometheus responseObject. - * @param prometheusResponseFieldNames data model which - * contains field names for the metric measurement - * and timestamp fieldName. + * @param responseObject Prometheus responseObject. + * @param prometheusResponseFieldNames data model which contains field names for the metric + * measurement and timestamp fieldName. 
*/ - public PrometheusResponse(JSONObject responseObject, - PrometheusResponseFieldNames prometheusResponseFieldNames) { + public PrometheusResponse( + JSONObject responseObject, PrometheusResponseFieldNames prometheusResponseFieldNames) { this.responseObject = responseObject; this.prometheusResponseFieldNames = prometheusResponseFieldNames; } @@ -66,18 +65,22 @@ public Iterator iterator() { for (int j = 0; j < values.length(); j++) { LinkedHashMap linkedHashMap = new LinkedHashMap<>(); JSONArray val = values.getJSONArray(j); - linkedHashMap.put(prometheusResponseFieldNames.getTimestampFieldName(), + linkedHashMap.put( + prometheusResponseFieldNames.getTimestampFieldName(), new ExprTimestampValue(Instant.ofEpochMilli((long) (val.getDouble(0) * 1000)))); - linkedHashMap.put(prometheusResponseFieldNames.getValueFieldName(), getValue(val, 1, - prometheusResponseFieldNames.getValueType())); + linkedHashMap.put( + prometheusResponseFieldNames.getValueFieldName(), + getValue(val, 1, prometheusResponseFieldNames.getValueType())); insertLabels(linkedHashMap, metric); result.add(new ExprTupleValue(linkedHashMap)); } } } else { - throw new RuntimeException(String.format("Unexpected Result Type: %s during Prometheus " - + "Response Parsing. 'matrix' resultType is expected", - responseObject.getString(RESULT_TYPE_KEY))); + throw new RuntimeException( + String.format( + "Unexpected Result Type: %s during Prometheus " + + "Response Parsing. 'matrix' resultType is expected", + responseObject.getString(RESULT_TYPE_KEY))); } return result.iterator(); } @@ -103,12 +106,11 @@ private String getKey(String key) { } else { return this.prometheusResponseFieldNames.getGroupByList().stream() .filter(expression -> expression.getDelegated() instanceof ReferenceExpression) - .filter(expression - -> ((ReferenceExpression) expression.getDelegated()).getAttr().equals(key)) + .filter( + expression -> ((ReferenceExpression) expression.getDelegated()).getAttr().equals(key)) .findFirst() .map(NamedExpression::getName) .orElse(key); } } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java index 790189d903..f0933eee9d 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java @@ -20,12 +20,11 @@ @Getter @RequiredArgsConstructor public enum PrometheusMetricDefaultSchema { - - DEFAULT_MAPPING(new ImmutableMap.Builder() - .put(TIMESTAMP, ExprCoreType.TIMESTAMP) - .put(VALUE, ExprCoreType.DOUBLE) - .build()); + DEFAULT_MAPPING( + new ImmutableMap.Builder() + .put(TIMESTAMP, ExprCoreType.TIMESTAMP) + .put(VALUE, ExprCoreType.DOUBLE) + .build()); private final Map mapping; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java index 7f75cb3c07..598e388914 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java @@ -23,26 +23,19 @@ import org.opensearch.sql.prometheus.storage.model.PrometheusResponseFieldNames; import org.opensearch.sql.storage.TableScanOperator; -/** - * Prometheus metric scan operator. 
- */ +/** Prometheus metric scan operator. */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class PrometheusMetricScan extends TableScanOperator { private final PrometheusClient prometheusClient; - @EqualsAndHashCode.Include - @Getter - @Setter - @ToString.Include + @EqualsAndHashCode.Include @Getter @Setter @ToString.Include private PrometheusQueryRequest request; private Iterator iterator; - @Setter - private PrometheusResponseFieldNames prometheusResponseFieldNames; - + @Setter private PrometheusResponseFieldNames prometheusResponseFieldNames; private static final Logger LOG = LogManager.getLogger(); @@ -60,17 +53,25 @@ public PrometheusMetricScan(PrometheusClient prometheusClient) { @Override public void open() { super.open(); - this.iterator = AccessController.doPrivileged((PrivilegedAction>) () -> { - try { - JSONObject responseObject = prometheusClient.queryRange( - request.getPromQl(), - request.getStartTime(), request.getEndTime(), request.getStep()); - return new PrometheusResponse(responseObject, prometheusResponseFieldNames).iterator(); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException("Error fetching data from prometheus server. " + e.getMessage()); - } - }); + this.iterator = + AccessController.doPrivileged( + (PrivilegedAction>) + () -> { + try { + JSONObject responseObject = + prometheusClient.queryRange( + request.getPromQl(), + request.getStartTime(), + request.getEndTime(), + request.getStep()); + return new PrometheusResponse(responseObject, prometheusResponseFieldNames) + .iterator(); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + "Error fetching data from prometheus server. " + e.getMessage()); + } + }); } @Override diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java index 4844e1f6db..1124e93608 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.storage; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.LABELS; @@ -26,40 +25,30 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * Prometheus table (metric) implementation. - * This can be constructed from a metric Name - * or from PrometheusQueryRequest In case of query_range table function. + * Prometheus table (metric) implementation. This can be constructed from a metric Name or from + * PrometheusQueryRequest In case of query_range table function. */ public class PrometheusMetricTable implements Table { private final PrometheusClient prometheusClient; - @Getter - private final String metricName; - - @Getter - private final PrometheusQueryRequest prometheusQueryRequest; + @Getter private final String metricName; + @Getter private final PrometheusQueryRequest prometheusQueryRequest; - /** - * The cached mapping of field and type in index. - */ + /** The cached mapping of field and type in index. */ private Map cachedFieldTypes = null; - /** - * Constructor only with metric name. - */ + /** Constructor only with metric name. 
*/ public PrometheusMetricTable(PrometheusClient prometheusService, @Nonnull String metricName) { this.prometheusClient = prometheusService; this.metricName = metricName; this.prometheusQueryRequest = null; } - /** - * Constructor for entire promQl Request. - */ - public PrometheusMetricTable(PrometheusClient prometheusService, - @Nonnull PrometheusQueryRequest prometheusQueryRequest) { + /** Constructor for entire promQl Request. */ + public PrometheusMetricTable( + PrometheusClient prometheusService, @Nonnull PrometheusQueryRequest prometheusQueryRequest) { this.prometheusClient = prometheusService; this.metricName = null; this.prometheusQueryRequest = prometheusQueryRequest; @@ -67,14 +56,12 @@ public PrometheusMetricTable(PrometheusClient prometheusService, @Override public boolean exists() { - throw new UnsupportedOperationException( - "Prometheus metric exists operation is not supported"); + throw new UnsupportedOperationException("Prometheus metric exists operation is not supported"); } @Override public void create(Map schema) { - throw new UnsupportedOperationException( - "Prometheus metric create operation is not supported"); + throw new UnsupportedOperationException("Prometheus metric create operation is not supported"); } @Override @@ -82,11 +69,10 @@ public Map getFieldTypes() { if (cachedFieldTypes == null) { if (metricName != null) { cachedFieldTypes = - new PrometheusDescribeMetricRequest(prometheusClient, null, - metricName).getFieldTypes(); + new PrometheusDescribeMetricRequest(prometheusClient, null, metricName).getFieldTypes(); } else { - cachedFieldTypes = new HashMap<>(PrometheusMetricDefaultSchema.DEFAULT_MAPPING - .getMapping()); + cachedFieldTypes = + new HashMap<>(PrometheusMetricDefaultSchema.DEFAULT_MAPPING.getMapping()); cachedFieldTypes.put(LABELS, ExprCoreType.STRING); } } @@ -95,8 +81,7 @@ public Map getFieldTypes() { @Override public PhysicalPlan implement(LogicalPlan plan) { - PrometheusMetricScan metricScan = - new PrometheusMetricScan(prometheusClient); + PrometheusMetricScan metricScan = new PrometheusMetricScan(prometheusClient); return plan.accept(new PrometheusDefaultImplementor(), metricScan); } @@ -105,8 +90,8 @@ public LogicalPlan optimize(LogicalPlan plan) { return PrometheusLogicalPlanOptimizerFactory.create().optimize(plan); } - //Only handling query_range function for now. - //we need to move PPL implementations to ScanBuilder in future. + // Only handling query_range function for now. + // we need to move PPL implementations to ScanBuilder in future. @Override public TableScanBuilder createScanBuilder() { if (metricName == null) { diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java index 738eb023b6..29fc15e2d0 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java @@ -24,10 +24,7 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.utils.SystemIndexUtils; - -/** - * Prometheus storage engine implementation. - */ +/** Prometheus storage engine implementation. 
*/ @RequiredArgsConstructor public class PrometheusStorageEngine implements StorageEngine { @@ -52,16 +49,14 @@ public Table getTable(DataSourceSchemaName dataSourceSchemaName, String tableNam } } - private Table resolveInformationSchemaTable(DataSourceSchemaName dataSourceSchemaName, - String tableName) { + private Table resolveInformationSchemaTable( + DataSourceSchemaName dataSourceSchemaName, String tableName) { if (SystemIndexUtils.TABLE_NAME_FOR_TABLES_INFO.equals(tableName)) { - return new PrometheusSystemTable(prometheusClient, - dataSourceSchemaName, SystemIndexUtils.TABLE_INFO); + return new PrometheusSystemTable( + prometheusClient, dataSourceSchemaName, SystemIndexUtils.TABLE_INFO); } else { throw new SemanticCheckException( String.format("Information Schema doesn't contain %s table", tableName)); } } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java index b3ecd25af3..edae263ce3 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java @@ -56,23 +56,20 @@ public DataSourceType getDataSourceType() { @Override public DataSource createDataSource(DataSourceMetadata metadata) { return new DataSource( - metadata.getName(), - DataSourceType.PROMETHEUS, - getStorageEngine(metadata.getProperties())); + metadata.getName(), DataSourceType.PROMETHEUS, getStorageEngine(metadata.getProperties())); } - - //Need to refactor to a separate Validator class. + // Need to refactor to a separate Validator class. private void validateDataSourceConfigProperties(Map dataSourceMetadataConfig) throws URISyntaxException { if (dataSourceMetadataConfig.get(AUTH_TYPE) != null) { - AuthenticationType authenticationType - = AuthenticationType.get(dataSourceMetadataConfig.get(AUTH_TYPE)); + AuthenticationType authenticationType = + AuthenticationType.get(dataSourceMetadataConfig.get(AUTH_TYPE)); if (AuthenticationType.BASICAUTH.equals(authenticationType)) { validateMissingFields(dataSourceMetadataConfig, Set.of(URI, USERNAME, PASSWORD)); } else if (AuthenticationType.AWSSIGV4AUTH.equals(authenticationType)) { - validateMissingFields(dataSourceMetadataConfig, Set.of(URI, ACCESS_KEY, SECRET_KEY, - REGION)); + validateMissingFields( + dataSourceMetadataConfig, Set.of(URI, ACCESS_KEY, SECRET_KEY, REGION)); } } else { validateMissingFields(dataSourceMetadataConfig, Set.of(URI)); @@ -83,20 +80,21 @@ private void validateDataSourceConfigProperties(Map dataSourceMe StorageEngine getStorageEngine(Map requiredConfig) { PrometheusClient prometheusClient; prometheusClient = - AccessController.doPrivileged((PrivilegedAction) () -> { - try { - validateDataSourceConfigProperties(requiredConfig); - return new PrometheusClientImpl(getHttpClient(requiredConfig), - new URI(requiredConfig.get(URI))); - } catch (URISyntaxException e) { - throw new IllegalArgumentException( - String.format("Invalid URI in prometheus properties: %s", e.getMessage())); - } - }); + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + validateDataSourceConfigProperties(requiredConfig); + return new PrometheusClientImpl( + getHttpClient(requiredConfig), new URI(requiredConfig.get(URI))); + } catch (URISyntaxException e) { + throw new IllegalArgumentException( + String.format("Invalid URI in prometheus properties: %s", e.getMessage())); + 
} + }); return new PrometheusStorageEngine(prometheusClient); } - private OkHttpClient getHttpClient(Map config) { OkHttpClient.Builder okHttpClient = new OkHttpClient.Builder(); okHttpClient.callTimeout(1, TimeUnit.MINUTES); @@ -104,16 +102,19 @@ private OkHttpClient getHttpClient(Map config) { if (config.get(AUTH_TYPE) != null) { AuthenticationType authenticationType = AuthenticationType.get(config.get(AUTH_TYPE)); if (AuthenticationType.BASICAUTH.equals(authenticationType)) { - okHttpClient.addInterceptor(new BasicAuthenticationInterceptor(config.get(USERNAME), - config.get(PASSWORD))); + okHttpClient.addInterceptor( + new BasicAuthenticationInterceptor(config.get(USERNAME), config.get(PASSWORD))); } else if (AuthenticationType.AWSSIGV4AUTH.equals(authenticationType)) { - okHttpClient.addInterceptor(new AwsSigningInterceptor( - new AWSStaticCredentialsProvider( - new BasicAWSCredentials(config.get(ACCESS_KEY), config.get(SECRET_KEY))), - config.get(REGION), "aps")); + okHttpClient.addInterceptor( + new AwsSigningInterceptor( + new AWSStaticCredentialsProvider( + new BasicAWSCredentials(config.get(ACCESS_KEY), config.get(SECRET_KEY))), + config.get(REGION), + "aps")); } else { throw new IllegalArgumentException( - String.format("AUTH Type : %s is not supported with Prometheus Connector", + String.format( + "AUTH Type : %s is not supported with Prometheus Connector", config.get(AUTH_TYPE))); } } @@ -132,13 +133,14 @@ private void validateMissingFields(Map config, Set field } StringBuilder errorStringBuilder = new StringBuilder(); if (missingFields.size() > 0) { - errorStringBuilder.append(String.format( - "Missing %s fields in the Prometheus connector properties.", missingFields)); + errorStringBuilder.append( + String.format( + "Missing %s fields in the Prometheus connector properties.", missingFields)); } if (invalidLengthFields.size() > 0) { - errorStringBuilder.append(String.format( - "Fields %s exceeds more than 1000 characters.", invalidLengthFields)); + errorStringBuilder.append( + String.format("Fields %s exceeds more than 1000 characters.", invalidLengthFields)); } if (errorStringBuilder.length() > 0) { throw new IllegalArgumentException(errorStringBuilder.toString()); @@ -148,8 +150,9 @@ private void validateMissingFields(Map config, Set field private void validateURI(Map config) throws URISyntaxException { URI uri = new URI(config.get(URI)); String host = uri.getHost(); - if (host == null || (!(DomainValidator.getInstance().isValid(host) - || DomainValidator.getInstance().isValidLocalTld(host)))) { + if (host == null + || (!(DomainValidator.getInstance().isValid(host) + || DomainValidator.getInstance().isValidLocalTld(host)))) { throw new IllegalArgumentException( String.format("Invalid hostname in the uri: %s", config.get(URI))); } else { @@ -158,10 +161,10 @@ private void validateURI(Map config) throws URISyntaxException { Matcher matcher = allowHostsPattern.matcher(host); if (!matcher.matches()) { throw new IllegalArgumentException( - String.format("Disallowed hostname in the uri: %s. Validate with %s config", + String.format( + "Disallowed hostname in the uri: %s. 
Validate with %s config", config.get(URI), Settings.Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue())); } } } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java index dcb87c2cce..9ce8ae85fb 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java @@ -20,20 +20,16 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * This is {@link Table} for querying exemplars in prometheus Table. - * Since {@link PrometheusMetricTable} is overloaded with query_range and normal - * PPL metric queries. Created a separate table for handling - * {@link PrometheusQueryExemplarsRequest} + * This is {@link Table} for querying exemplars in prometheus Table. Since {@link + * PrometheusMetricTable} is overloaded with query_range and normal PPL metric queries. Created a + * separate table for handling {@link PrometheusQueryExemplarsRequest} */ @RequiredArgsConstructor public class QueryExemplarsTable implements Table { - @Getter - private final PrometheusClient prometheusClient; - - @Getter - private final PrometheusQueryExemplarsRequest exemplarsRequest; + @Getter private final PrometheusClient prometheusClient; + @Getter private final PrometheusQueryExemplarsRequest exemplarsRequest; @Override public Map getFieldTypes() { @@ -49,5 +45,4 @@ public PhysicalPlan implement(LogicalPlan plan) { public TableScanBuilder createScanBuilder() { return new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java index 6d426d13c8..f83a97dc06 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java @@ -29,13 +29,9 @@ import org.opensearch.sql.prometheus.storage.querybuilder.StepParameterResolver; import org.opensearch.sql.prometheus.storage.querybuilder.TimeRangeParametersResolver; -/** - * Default Implementor of Logical plan for prometheus. - */ +/** Default Implementor of Logical plan for prometheus. */ @RequiredArgsConstructor -public class PrometheusDefaultImplementor - extends DefaultImplementor { - +public class PrometheusDefaultImplementor extends DefaultImplementor { @Override public PhysicalPlan visitNode(LogicalPlan plan, PrometheusMetricScan context) { @@ -44,62 +40,64 @@ public PhysicalPlan visitNode(LogicalPlan plan, PrometheusMetricScan context) { } else if (plan instanceof PrometheusLogicalMetricAgg) { return visitIndexAggregation((PrometheusLogicalMetricAgg) plan, context); } else { - throw new IllegalStateException(StringUtils.format("unexpected plan node type %s", - plan.getClass())); + throw new IllegalStateException( + StringUtils.format("unexpected plan node type %s", plan.getClass())); } } - /** - * Implement PrometheusLogicalMetricScan. - */ - public PhysicalPlan visitIndexScan(PrometheusLogicalMetricScan node, - PrometheusMetricScan context) { + /** Implement PrometheusLogicalMetricScan. 
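Builds the PromQL series selector from the metric name and filter, then sets the time range and step on the query_range request.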
*/ + public PhysicalPlan visitIndexScan( + PrometheusLogicalMetricScan node, PrometheusMetricScan context) { String query = SeriesSelectionQueryBuilder.build(node.getMetricName(), node.getFilter()); context.getRequest().setPromQl(query); setTimeRangeParameters(node.getFilter(), context); - context.getRequest() - .setStep(StepParameterResolver.resolve(context.getRequest().getStartTime(), - context.getRequest().getEndTime(), null)); + context + .getRequest() + .setStep( + StepParameterResolver.resolve( + context.getRequest().getStartTime(), context.getRequest().getEndTime(), null)); return context; } - /** - * Implement PrometheusLogicalMetricAgg. - */ - public PhysicalPlan visitIndexAggregation(PrometheusLogicalMetricAgg node, - PrometheusMetricScan context) { + /** Implement PrometheusLogicalMetricAgg. */ + public PhysicalPlan visitIndexAggregation( + PrometheusLogicalMetricAgg node, PrometheusMetricScan context) { setTimeRangeParameters(node.getFilter(), context); - context.getRequest() - .setStep(StepParameterResolver.resolve(context.getRequest().getStartTime(), - context.getRequest().getEndTime(), node.getGroupByList())); + context + .getRequest() + .setStep( + StepParameterResolver.resolve( + context.getRequest().getStartTime(), + context.getRequest().getEndTime(), + node.getGroupByList())); String step = context.getRequest().getStep(); - String seriesSelectionQuery - = SeriesSelectionQueryBuilder.build(node.getMetricName(), node.getFilter()); + String seriesSelectionQuery = + SeriesSelectionQueryBuilder.build(node.getMetricName(), node.getFilter()); - String aggregateQuery - = AggregationQueryBuilder.build(node.getAggregatorList(), - node.getGroupByList()); + String aggregateQuery = + AggregationQueryBuilder.build(node.getAggregatorList(), node.getGroupByList()); String finalQuery = String.format(aggregateQuery, seriesSelectionQuery + "[" + step + "]"); context.getRequest().setPromQl(finalQuery); - //Since prometheus response doesn't have any fieldNames in its output. - //the field names are sent to PrometheusResponse constructor via context. + // Since prometheus response doesn't have any fieldNames in its output. + // the field names are sent to PrometheusResponse constructor via context. 
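// Illustrative example (an assumption, not taken from this change): for a stats command such as
// `stats avg(@value) by span(@timestamp, 40s), handler`, the builders above would produce a PromQL
// string roughly of the form `avg by (handler) (<series selector>[40s])`, with the span expression
// also supplying the step and the timestamp/value field names placed into the response.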
setPrometheusResponseFieldNames(node, context); return context; } @Override - public PhysicalPlan visitRelation(LogicalRelation node, - PrometheusMetricScan context) { + public PhysicalPlan visitRelation(LogicalRelation node, PrometheusMetricScan context) { PrometheusMetricTable prometheusMetricTable = (PrometheusMetricTable) node.getTable(); String query = SeriesSelectionQueryBuilder.build(node.getRelationName(), null); context.getRequest().setPromQl(query); setTimeRangeParameters(null, context); - context.getRequest() - .setStep(StepParameterResolver.resolve(context.getRequest().getStartTime(), - context.getRequest().getEndTime(), null)); + context + .getRequest() + .setStep( + StepParameterResolver.resolve( + context.getRequest().getStartTime(), context.getRequest().getEndTime(), null)); return context; } @@ -110,8 +108,8 @@ private void setTimeRangeParameters(Expression filter, PrometheusMetricScan cont context.getRequest().setEndTime(timeRange.getSecond()); } - private void setPrometheusResponseFieldNames(PrometheusLogicalMetricAgg node, - PrometheusMetricScan context) { + private void setPrometheusResponseFieldNames( + PrometheusLogicalMetricAgg node, PrometheusMetricScan context) { Optional spanExpression = getSpanExpression(node.getGroupByList()); if (spanExpression.isEmpty()) { throw new RuntimeException( @@ -133,6 +131,4 @@ private Optional getSpanExpression(List namedE .filter(expression -> expression.getDelegated() instanceof SpanExpression) .findFirst(); } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java index d3a6ef184f..303ace7906 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java @@ -17,7 +17,6 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.NamedExpression; - @Getter @Setter public class PrometheusResponseFieldNames { @@ -26,5 +25,4 @@ public class PrometheusResponseFieldNames { private ExprType valueType = DOUBLE; private String timestampFieldName = TIMESTAMP; private List groupByList; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java index 86ca99cea8..02187c5662 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java @@ -21,5 +21,4 @@ public class QueryRangeParameters { private Long start; private Long end; private String step; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java index a141707077..540e2d8cf4 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java @@ -18,49 +18,51 @@ import org.opensearch.sql.expression.span.SpanExpression; /** - * This class builds aggregation query for the given stats commands. 
- * In the generated query a placeholder(%s) is added in place of metric selection query - * and later replaced by metric selection query. + * This class builds aggregation query for the given stats commands. In the generated query a + * placeholder(%s) is added in place of metric selection query and later replaced by metric + * selection query. */ @NoArgsConstructor public class AggregationQueryBuilder { - private static final Set allowedStatsFunctions = Set.of( - BuiltinFunctionName.MAX.getName().getFunctionName(), - BuiltinFunctionName.MIN.getName().getFunctionName(), - BuiltinFunctionName.COUNT.getName().getFunctionName(), - BuiltinFunctionName.SUM.getName().getFunctionName(), - BuiltinFunctionName.AVG.getName().getFunctionName() - ); - + private static final Set allowedStatsFunctions = + Set.of( + BuiltinFunctionName.MAX.getName().getFunctionName(), + BuiltinFunctionName.MIN.getName().getFunctionName(), + BuiltinFunctionName.COUNT.getName().getFunctionName(), + BuiltinFunctionName.SUM.getName().getFunctionName(), + BuiltinFunctionName.AVG.getName().getFunctionName()); /** * Build Aggregation query from series selector query from expression. * * @return query string. */ - public static String build(List namedAggregatorList, - List groupByList) { + public static String build( + List namedAggregatorList, List groupByList) { if (namedAggregatorList.size() > 1) { throw new RuntimeException( "Prometheus Catalog doesn't multiple aggregations in stats command"); } - if (!allowedStatsFunctions - .contains(namedAggregatorList.get(0).getFunctionName().getFunctionName())) { - throw new RuntimeException(String.format( - "Prometheus Catalog only supports %s aggregations.", allowedStatsFunctions)); + if (!allowedStatsFunctions.contains( + namedAggregatorList.get(0).getFunctionName().getFunctionName())) { + throw new RuntimeException( + String.format( + "Prometheus Catalog only supports %s aggregations.", allowedStatsFunctions)); } StringBuilder aggregateQuery = new StringBuilder(); - aggregateQuery.append(namedAggregatorList.get(0).getFunctionName().getFunctionName()) + aggregateQuery + .append(namedAggregatorList.get(0).getFunctionName().getFunctionName()) .append(" "); if (groupByList != null && !groupByList.isEmpty()) { - groupByList = groupByList.stream() - .filter(expression -> !(expression.getDelegated() instanceof SpanExpression)) - .collect(Collectors.toList()); + groupByList = + groupByList.stream() + .filter(expression -> !(expression.getDelegated() instanceof SpanExpression)) + .collect(Collectors.toList()); if (groupByList.size() > 0) { aggregateQuery.append("by("); aggregateQuery.append( @@ -78,5 +80,4 @@ public static String build(List namedAggregatorList, .append("(%s))"); return aggregateQuery.toString(); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java index 461b5341f8..d824fcb5b3 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java @@ -7,7 +7,6 @@ package org.opensearch.sql.prometheus.storage.querybuilder; - import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.TIMESTAMP; import java.util.stream.Collectors; @@ -19,14 +18,10 @@ import org.opensearch.sql.expression.ReferenceExpression; 
import org.opensearch.sql.expression.function.BuiltinFunctionName; -/** - * This class builds metric selection query from the filter condition - * and metric name. - */ +/** This class builds metric selection query from the filter condition and metric name. */ @NoArgsConstructor public class SeriesSelectionQueryBuilder { - /** * Build Prometheus series selector query from expression. * @@ -35,8 +30,8 @@ public class SeriesSelectionQueryBuilder { */ public static String build(String metricName, Expression filterCondition) { if (filterCondition != null) { - SeriesSelectionExpressionNodeVisitor seriesSelectionExpressionNodeVisitor - = new SeriesSelectionExpressionNodeVisitor(); + SeriesSelectionExpressionNodeVisitor seriesSelectionExpressionNodeVisitor = + new SeriesSelectionExpressionNodeVisitor(); String selectorQuery = filterCondition.accept(seriesSelectionExpressionNodeVisitor, null); if (selectorQuery != null) { return metricName + "{" + selectorQuery + "}"; @@ -54,9 +49,9 @@ public String visitFunction(FunctionExpression func, Object context) { .filter(StringUtils::isNotEmpty) .collect(Collectors.joining(" , ")); } else if ((BuiltinFunctionName.LTE.getName().equals(func.getFunctionName()) - || BuiltinFunctionName.GTE.getName().equals(func.getFunctionName()) - || BuiltinFunctionName.LESS.getName().equals(func.getFunctionName()) - || BuiltinFunctionName.GREATER.getName().equals(func.getFunctionName())) + || BuiltinFunctionName.GTE.getName().equals(func.getFunctionName()) + || BuiltinFunctionName.LESS.getName().equals(func.getFunctionName()) + || BuiltinFunctionName.GREATER.getName().equals(func.getFunctionName())) && ((ReferenceExpression) func.getArguments().get(0)).getAttr().equals(TIMESTAMP)) { return null; } else if (BuiltinFunctionName.EQUAL.getName().equals(func.getFunctionName())) { @@ -65,11 +60,10 @@ public String visitFunction(FunctionExpression func, Object context) { + func.getArguments().get(1); } else { throw new RuntimeException( - String.format("Prometheus Datasource doesn't support %s " - + "in where command.", + String.format( + "Prometheus Datasource doesn't support %s " + "in where command.", func.getFunctionName().getFunctionName())); } } } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java index 2078950a5d..4c23ea9086 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java @@ -15,25 +15,20 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.span.SpanExpression; -/** - * This class resolves step parameter required for - * query_range api of prometheus. - */ +/** This class resolves step parameter required for query_range api of prometheus. */ @NoArgsConstructor public class StepParameterResolver { /** - * Extract step from groupByList or apply heuristic arithmetic - * on endTime and startTime. - * + * Extract step from groupByList or apply heuristic arithmetic on endTime and startTime. * * @param startTime startTime. * @param endTime endTime. * @param groupByList groupByList. * @return Step String. 
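For instance, a group-by of span(@timestamp, 40s) would typically resolve to a step of "40s"; without a span expression the step falls back to a heuristic derived from the start and end times.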
*/ - public static String resolve(@NonNull Long startTime, @NonNull Long endTime, - List groupByList) { + public static String resolve( + @NonNull Long startTime, @NonNull Long endTime, List groupByList) { Optional spanExpression = getSpanExpression(groupByList); if (spanExpression.isPresent()) { if (StringUtils.isEmpty(spanExpression.get().getUnit().getName())) { @@ -48,7 +43,7 @@ public static String resolve(@NonNull Long startTime, @NonNull Long endTime, } private static Optional getSpanExpression( - List namedExpressionList) { + List namedExpressionList) { if (namedExpressionList == null) { return Optional.empty(); } @@ -57,7 +52,4 @@ private static Optional getSpanExpression( .map(expression -> (SpanExpression) expression.getDelegated()) .findFirst(); } - - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java index b462f6bafe..c7766f22d6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java @@ -22,16 +22,14 @@ @NoArgsConstructor public class TimeRangeParametersResolver extends ExpressionNodeVisitor { - private Long startTime; private Long endTime; /** - * Build Range Query Parameters from filter expression. - * If the filter condition consists of @timestamp, startTime and - * endTime are derived. or else it will be defaulted to now() and now()-1hr. - * If one of starttime and endtime are provided, the other will be derived from them - * by fixing the time range duration to 1hr. + * Build Range Query Parameters from filter expression. If the filter condition consists + * of @timestamp, startTime and endTime are derived. or else it will be defaulted to now() and + * now()-1hr. If one of starttime and endtime are provided, the other will be derived from them by + * fixing the time range duration to 1hr. * * @param filterCondition expression. * @return query string @@ -72,13 +70,10 @@ public Void visitFunction(FunctionExpression func, Object context) { } } } else { - func.getArguments() - .stream() + func.getArguments().stream() .filter(arg -> arg instanceof FunctionExpression) .forEach(arg -> visitFunction((FunctionExpression) arg, context)); } return null; } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java index dca946da57..b5557e7298 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java @@ -5,7 +5,6 @@ package org.opensearch.sql.prometheus.storage.system; - import static org.opensearch.sql.utils.SystemIndexUtils.systemTable; import com.google.common.annotations.VisibleForTesting; @@ -25,13 +24,9 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.utils.SystemIndexUtils; -/** - * Prometheus System Table Implementation. - */ +/** Prometheus System Table Implementation. */ public class PrometheusSystemTable implements Table { - /** - * System Index Name. - */ + /** System Index Name. 
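Pairs the system table schema with the Prometheus system request (list metrics or describe a metric) resolved from the table name.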
*/ private final Pair systemIndexBundle; private final DataSourceSchemaName dataSourceSchemaName; @@ -54,8 +49,7 @@ public PhysicalPlan implement(LogicalPlan plan) { @VisibleForTesting @RequiredArgsConstructor - public class PrometheusSystemTableDefaultImplementor - extends DefaultImplementor { + public class PrometheusSystemTableDefaultImplementor extends DefaultImplementor { @Override public PhysicalPlan visitRelation(LogicalRelation node, Object context) { @@ -67,12 +61,14 @@ private Pair buildIndexBun PrometheusClient client, String indexName) { SystemIndexUtils.SystemTable systemTable = systemTable(indexName); if (systemTable.isSystemInfoTable()) { - return Pair.of(PrometheusSystemTableSchema.SYS_TABLE_TABLES, + return Pair.of( + PrometheusSystemTableSchema.SYS_TABLE_TABLES, new PrometheusListMetricsRequest(client, dataSourceSchemaName)); } else { - return Pair.of(PrometheusSystemTableSchema.SYS_TABLE_MAPPINGS, - new PrometheusDescribeMetricRequest(client, - dataSourceSchemaName, systemTable.getTableName())); + return Pair.of( + PrometheusSystemTableSchema.SYS_TABLE_MAPPINGS, + new PrometheusDescribeMetricRequest( + client, dataSourceSchemaName, systemTable.getTableName())); } } } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java index 5c0bc656fe..907e8a0c15 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java @@ -13,16 +13,13 @@ import org.opensearch.sql.prometheus.request.system.PrometheusSystemRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * Prometheus table scan operator. - */ +/** Prometheus table scan operator. 
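Executes the wrapped PrometheusSystemRequest and iterates over the rows it returns.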
*/ @RequiredArgsConstructor @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class PrometheusSystemTableScan extends TableScanOperator { - @EqualsAndHashCode.Include - private final PrometheusSystemRequest request; + @EqualsAndHashCode.Include private final PrometheusSystemRequest request; private Iterator iterator; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java index 668a208c79..9272731dce 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java @@ -18,22 +18,23 @@ @Getter @RequiredArgsConstructor public enum PrometheusSystemTableSchema { - - SYS_TABLE_TABLES(new ImmutableMap.Builder() - .put("TABLE_CATALOG", STRING) - .put("TABLE_SCHEMA", STRING) - .put("TABLE_NAME", STRING) - .put("TABLE_TYPE", STRING) - .put("UNIT", STRING) - .put("REMARKS", STRING) - .build()), - SYS_TABLE_MAPPINGS(new ImmutableMap.Builder() - .put("TABLE_CATALOG", STRING) - .put("TABLE_SCHEMA", STRING) - .put("TABLE_NAME", STRING) - .put("COLUMN_NAME", STRING) - .put("DATA_TYPE", STRING) - .build()); + SYS_TABLE_TABLES( + new ImmutableMap.Builder() + .put("TABLE_CATALOG", STRING) + .put("TABLE_SCHEMA", STRING) + .put("TABLE_NAME", STRING) + .put("TABLE_TYPE", STRING) + .put("UNIT", STRING) + .put("REMARKS", STRING) + .build()), + SYS_TABLE_MAPPINGS( + new ImmutableMap.Builder() + .put("TABLE_CATALOG", STRING) + .put("TABLE_SCHEMA", STRING) + .put("TABLE_NAME", STRING) + .put("COLUMN_NAME", STRING) + .put("DATA_TYPE", STRING) + .build()); private final Map mapping; } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java index 35edc83614..24bec1ede3 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java @@ -16,52 +16,54 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.NamedArgumentExpression; -/** - * Utility class for common table function methods. - */ +/** Utility class for common table function methods. */ @UtilityClass public class TableFunctionUtils { /** - * Validates if function arguments are valid - * in both the cases when the arguments are passed by position or name. + * Validates if function arguments are valid in both the cases when the arguments are passed by + * position or name. * * @param arguments arguments of function provided in the input order. * @param argumentNames ordered argument names of the function. 
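@throws SemanticCheckException if arguments mix the by-name and by-position styles, or if required arguments are missing.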
*/ - public static void validatePrometheusTableFunctionArguments(List arguments, - List argumentNames) { - Boolean argumentsPassedByName = arguments.stream() - .noneMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); - Boolean argumentsPassedByPosition = arguments.stream() - .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + public static void validatePrometheusTableFunctionArguments( + List arguments, List argumentNames) { + Boolean argumentsPassedByName = + arguments.stream() + .noneMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + Boolean argumentsPassedByPosition = + arguments.stream() + .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); if (!(argumentsPassedByName || argumentsPassedByPosition)) { throw new SemanticCheckException("Arguments should be either passed by name or position"); } if (arguments.size() != argumentNames.size()) { throw new SemanticCheckException( - generateErrorMessageForMissingArguments(argumentsPassedByPosition, arguments, - argumentNames)); + generateErrorMessageForMissingArguments( + argumentsPassedByPosition, arguments, argumentNames)); } } /** - * Get Named Arguments of Table Function Arguments. - * If they are passed by position create new ones or else return the same arguments passed. + * Get Named Arguments of Table Function Arguments. If they are passed by position create new ones + * or else return the same arguments passed. * * @param arguments arguments of function provided in the input order. * @param argumentNames ordered argument names of the function. */ - public static List getNamedArgumentsOfTableFunction(List arguments, - List argumentNames) { - boolean argumentsPassedByPosition = arguments.stream() - .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + public static List getNamedArgumentsOfTableFunction( + List arguments, List argumentNames) { + boolean argumentsPassedByPosition = + arguments.stream() + .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); if (argumentsPassedByPosition) { List namedArguments = new ArrayList<>(); for (int i = 0; i < arguments.size(); i++) { - namedArguments.add(new NamedArgumentExpression(argumentNames.get(i), - ((NamedArgumentExpression) arguments.get(i)).getValue())); + namedArguments.add( + new NamedArgumentExpression( + argumentNames.get(i), ((NamedArgumentExpression) arguments.get(i)).getValue())); } return namedArguments; } @@ -73,17 +75,17 @@ private static String generateErrorMessageForMissingArguments( List arguments, List argumentNames) { if (areArgumentsPassedByPosition) { - return String.format("Missing arguments:[%s]", + return String.format( + "Missing arguments:[%s]", String.join(",", argumentNames.subList(arguments.size(), argumentNames.size()))); } else { Set requiredArguments = new HashSet<>(argumentNames); Set providedArguments = - arguments.stream().map(expression -> ((NamedArgumentExpression) expression).getArgName()) + arguments.stream() + .map(expression -> ((NamedArgumentExpression) expression).getArgName()) .collect(Collectors.toSet()); requiredArguments.removeAll(providedArguments); return String.format("Missing arguments:[%s]", String.join(",", requiredArguments)); } } - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java index 
b26a45e301..735a1a1052 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java @@ -43,7 +43,6 @@ public class PrometheusClientImplTest { private MockWebServer mockWebServer; private PrometheusClient prometheusClient; - @BeforeEach void setUp() throws IOException { this.mockWebServer = new MockWebServer(); @@ -52,13 +51,13 @@ void setUp() throws IOException { new PrometheusClientImpl(new OkHttpClient(), mockWebServer.url("").uri().normalize()); } - @Test @SneakyThrows void testQueryRange() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("query_range_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("query_range_response.json")); mockWebServer.enqueue(mockResponse); JSONObject jsonObject = prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP); assertTrue(new JSONObject(getJson("query_range_result.json")).similar(jsonObject)); @@ -69,13 +68,15 @@ void testQueryRange() { @Test @SneakyThrows void testQueryRangeWith2xxStatusAndError() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("error_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("error_response.json")); mockWebServer.enqueue(mockResponse); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, - () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); + RuntimeException runtimeException = + assertThrows( + RuntimeException.class, + () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); assertEquals("Error", runtimeException.getMessage()); RecordedRequest recordedRequest = mockWebServer.takeRequest(); verifyQueryRangeCall(recordedRequest); @@ -84,13 +85,15 @@ void testQueryRangeWith2xxStatusAndError() { @Test @SneakyThrows void testQueryRangeWithNon2xxError() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setResponseCode(400); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setResponseCode(400); mockWebServer.enqueue(mockResponse); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, - () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); + RuntimeException runtimeException = + assertThrows( + RuntimeException.class, + () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); assertTrue( runtimeException.getMessage().contains("Request to Prometheus is Unsuccessful with :")); RecordedRequest recordedRequest = mockWebServer.takeRequest(); @@ -100,16 +103,20 @@ void testQueryRangeWithNon2xxError() { @Test @SneakyThrows void testGetLabel() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("get_labels_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("get_labels_response.json")); mockWebServer.enqueue(mockResponse); List response = prometheusClient.getLabels(METRIC_NAME); - assertEquals(new ArrayList() {{ 
- add("call"); - add("code"); - } - }, response); + assertEquals( + new ArrayList() { + { + add("call"); + add("code"); + } + }, + response); RecordedRequest recordedRequest = mockWebServer.takeRequest(); verifyGetLabelsCall(recordedRequest); } @@ -117,30 +124,34 @@ void testGetLabel() { @Test @SneakyThrows void testGetAllMetrics() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("all_metrics_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("all_metrics_response.json")); mockWebServer.enqueue(mockResponse); Map> response = prometheusClient.getAllMetrics(); Map> expected = new HashMap<>(); - expected.put("go_gc_duration_seconds", - Collections.singletonList(new MetricMetadata("summary", - "A summary of the pause duration of garbage collection cycles.", ""))); - expected.put("go_goroutines", - Collections.singletonList(new MetricMetadata("gauge", - "Number of goroutines that currently exist.", ""))); + expected.put( + "go_gc_duration_seconds", + Collections.singletonList( + new MetricMetadata( + "summary", "A summary of the pause duration of garbage collection cycles.", ""))); + expected.put( + "go_goroutines", + Collections.singletonList( + new MetricMetadata("gauge", "Number of goroutines that currently exist.", ""))); assertEquals(expected, response); RecordedRequest recordedRequest = mockWebServer.takeRequest(); verifyGetAllMetricsCall(recordedRequest); } - @Test @SneakyThrows void testQueryExemplars() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("query_exemplars_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("query_exemplars_response.json")); mockWebServer.enqueue(mockResponse); JSONArray jsonArray = prometheusClient.queryExemplars(QUERY, STARTTIME, ENDTIME); assertTrue(new JSONArray(getJson("query_exemplars_result.json")).similar(jsonArray)); diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java index d6e4a5cef4..6009d3229c 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java @@ -25,29 +25,31 @@ import org.opensearch.sql.prometheus.request.PrometheusQueryExemplarsRequest; import org.opensearch.sql.prometheus.storage.QueryExemplarsTable; - @ExtendWith(MockitoExtension.class) class QueryExemplarsFunctionImplementationTest { - @Mock - private PrometheusClient client; - + @Mock private PrometheusClient client; @Test void testValueOfAndTypeAndToString() { FunctionName functionName = new FunctionName("query_exemplars"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345))); - QueryExemplarFunctionImplementation queryExemplarFunctionImplementation - = + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", 
DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(12345))); + QueryExemplarFunctionImplementation queryExemplarFunctionImplementation = new QueryExemplarFunctionImplementation(functionName, namedArgumentExpressionList, client); - UnsupportedOperationException exception = assertThrows(UnsupportedOperationException.class, - () -> queryExemplarFunctionImplementation.valueOf()); - assertEquals("Prometheus defined function [query_exemplars] is only " - + "supported in SOURCE clause with prometheus connector catalog", exception.getMessage()); - assertEquals("query_exemplars(query=\"http_latency\", starttime=12345, endtime=12345)", + UnsupportedOperationException exception = + assertThrows( + UnsupportedOperationException.class, + () -> queryExemplarFunctionImplementation.valueOf()); + assertEquals( + "Prometheus defined function [query_exemplars] is only " + + "supported in SOURCE clause with prometheus connector catalog", + exception.getMessage()); + assertEquals( + "query_exemplars(query=\"http_latency\", starttime=12345, endtime=12345)", queryExemplarFunctionImplementation.toString()); assertEquals(ExprCoreType.STRUCT, queryExemplarFunctionImplementation.type()); } @@ -55,15 +57,15 @@ void testValueOfAndTypeAndToString() { @Test void testApplyArguments() { FunctionName functionName = new FunctionName("query_exemplars"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(1234))); - QueryExemplarFunctionImplementation queryExemplarFunctionImplementation - = + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(1234))); + QueryExemplarFunctionImplementation queryExemplarFunctionImplementation = new QueryExemplarFunctionImplementation(functionName, namedArgumentExpressionList, client); - QueryExemplarsTable queryExemplarsTable - = (QueryExemplarsTable) queryExemplarFunctionImplementation.applyArguments(); + QueryExemplarsTable queryExemplarsTable = + (QueryExemplarsTable) queryExemplarFunctionImplementation.applyArguments(); assertNotNull(queryExemplarsTable.getExemplarsRequest()); PrometheusQueryExemplarsRequest request = queryExemplarsTable.getExemplarsRequest(); assertEquals("http_latency", request.getQuery()); @@ -74,17 +76,17 @@ void testApplyArguments() { @Test void testApplyArgumentsException() { FunctionName functionName = new FunctionName("query_exemplars"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("end_time", DSL.literal(1234))); - QueryExemplarFunctionImplementation queryExemplarFunctionImplementation - = + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("end_time", DSL.literal(1234))); + QueryExemplarFunctionImplementation queryExemplarFunctionImplementation = new QueryExemplarFunctionImplementation(functionName, namedArgumentExpressionList, client); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> queryExemplarFunctionImplementation.applyArguments()); + ExpressionEvaluationException exception = + 
assertThrows( + ExpressionEvaluationException.class, + () -> queryExemplarFunctionImplementation.applyArguments()); assertEquals("Invalid Function Argument:end_time", exception.getMessage()); } - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java index 48337e3f02..288bc35b0f 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java @@ -26,29 +26,31 @@ import org.opensearch.sql.prometheus.request.PrometheusQueryRequest; import org.opensearch.sql.prometheus.storage.PrometheusMetricTable; - @ExtendWith(MockitoExtension.class) class QueryRangeFunctionImplementationTest { - @Mock - private PrometheusClient client; - + @Mock private PrometheusClient client; @Test void testValueOfAndTypeAndToString() { FunctionName functionName = new FunctionName("query_range"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345)), - DSL.namedArgument("step", DSL.literal(14))); - QueryRangeFunctionImplementation queryRangeFunctionImplementation - = new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); - UnsupportedOperationException exception = assertThrows(UnsupportedOperationException.class, - () -> queryRangeFunctionImplementation.valueOf()); - assertEquals("Prometheus defined function [query_range] is only " - + "supported in SOURCE clause with prometheus connector catalog", exception.getMessage()); - assertEquals("query_range(query=\"http_latency\", starttime=12345, endtime=12345, step=14)", + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(12345)), + DSL.namedArgument("step", DSL.literal(14))); + QueryRangeFunctionImplementation queryRangeFunctionImplementation = + new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); + UnsupportedOperationException exception = + assertThrows( + UnsupportedOperationException.class, () -> queryRangeFunctionImplementation.valueOf()); + assertEquals( + "Prometheus defined function [query_range] is only " + + "supported in SOURCE clause with prometheus connector catalog", + exception.getMessage()); + assertEquals( + "query_range(query=\"http_latency\", starttime=12345, endtime=12345, step=14)", queryRangeFunctionImplementation.toString()); assertEquals(ExprCoreType.STRUCT, queryRangeFunctionImplementation.type()); } @@ -56,19 +58,20 @@ void testValueOfAndTypeAndToString() { @Test void testApplyArguments() { FunctionName functionName = new FunctionName("query_range"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(1234)), - DSL.namedArgument("step", DSL.literal(14))); - QueryRangeFunctionImplementation queryRangeFunctionImplementation - = new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); - 
PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) queryRangeFunctionImplementation.applyArguments(); + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(1234)), + DSL.namedArgument("step", DSL.literal(14))); + QueryRangeFunctionImplementation queryRangeFunctionImplementation = + new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); + PrometheusMetricTable prometheusMetricTable = + (PrometheusMetricTable) queryRangeFunctionImplementation.applyArguments(); assertNull(prometheusMetricTable.getMetricName()); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); - PrometheusQueryRequest prometheusQueryRequest - = prometheusMetricTable.getPrometheusQueryRequest(); + PrometheusQueryRequest prometheusQueryRequest = + prometheusMetricTable.getPrometheusQueryRequest(); assertEquals("http_latency", prometheusQueryRequest.getPromQl().toString()); assertEquals(12345, prometheusQueryRequest.getStartTime()); assertEquals(1234, prometheusQueryRequest.getEndTime()); @@ -78,17 +81,18 @@ void testApplyArguments() { @Test void testApplyArgumentsException() { FunctionName functionName = new FunctionName("query_range"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("end_time", DSL.literal(1234)), - DSL.namedArgument("step", DSL.literal(14))); - QueryRangeFunctionImplementation queryRangeFunctionImplementation - = new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> queryRangeFunctionImplementation.applyArguments()); + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("end_time", DSL.literal(1234)), + DSL.namedArgument("step", DSL.literal(14))); + QueryRangeFunctionImplementation queryRangeFunctionImplementation = + new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> queryRangeFunctionImplementation.applyArguments()); assertEquals("Invalid Function Argument:end_time", exception.getMessage()); } - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java index 3e26b46c8f..af8ebf48e2 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java @@ -35,34 +35,34 @@ @ExtendWith(MockitoExtension.class) class QueryExemplarsTableFunctionResolverTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; - @Mock - private FunctionProperties functionProperties; + @Mock private FunctionProperties functionProperties; @Test void testResolve() { - QueryExemplarsTableFunctionResolver queryExemplarsTableFunctionResolver - = new 
QueryExemplarsTableFunctionResolver(client); + QueryExemplarsTableFunctionResolver queryExemplarsTableFunctionResolver = + new QueryExemplarsTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_exemplars"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryExemplarsTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryExemplarsTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryExemplarsTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryExemplarFunctionImplementation); - QueryExemplarsTable queryExemplarsTable - = (QueryExemplarsTable) functionImplementation.applyArguments(); + QueryExemplarsTable queryExemplarsTable = + (QueryExemplarsTable) functionImplementation.applyArguments(); assertNotNull(queryExemplarsTable.getExemplarsRequest()); PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = queryExemplarsTable.getExemplarsRequest(); @@ -70,5 +70,4 @@ void testResolve() { assertEquals(12345L, prometheusQueryExemplarsRequest.getStartTime()); assertEquals(12345L, prometheusQueryExemplarsRequest.getEndTime()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java index 2a36600379..48050bcb15 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java @@ -37,35 +37,35 @@ @ExtendWith(MockitoExtension.class) class QueryRangeTableFunctionResolverTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; - @Mock - private FunctionProperties functionProperties; + @Mock private FunctionProperties functionProperties; @Test void testResolve() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", 
DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345)), - DSL.namedArgument("step", DSL.literal(14))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(12345)), + DSL.namedArgument("step", DSL.literal(14))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryRangeFunctionImplementation); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) functionImplementation.applyArguments(); + PrometheusMetricTable prometheusMetricTable = + (PrometheusMetricTable) functionImplementation.applyArguments(); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); PrometheusQueryRequest prometheusQueryRequest = prometheusMetricTable.getPrometheusQueryRequest(); @@ -77,29 +77,31 @@ void testResolve() { @Test void testArgumentsPassedByPosition() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument(null, DSL.literal("http_latency")), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(14))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument(null, DSL.literal("http_latency")), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(14))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - 
TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryRangeFunctionImplementation); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) functionImplementation.applyArguments(); + PrometheusMetricTable prometheusMetricTable = + (PrometheusMetricTable) functionImplementation.applyArguments(); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); PrometheusQueryRequest prometheusQueryRequest = prometheusMetricTable.getPrometheusQueryRequest(); @@ -109,32 +111,33 @@ void testArgumentsPassedByPosition() { assertEquals("14", prometheusQueryRequest.getStep()); } - @Test void testArgumentsPassedByNameWithDifferentOrder() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("endtime", DSL.literal(12345)), - DSL.namedArgument("step", DSL.literal(14)), - DSL.namedArgument("starttime", DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("endtime", DSL.literal(12345)), + DSL.namedArgument("step", DSL.literal(14)), + DSL.namedArgument("starttime", DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryRangeFunctionImplementation); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) functionImplementation.applyArguments(); + PrometheusMetricTable prometheusMetricTable = + (PrometheusMetricTable) functionImplementation.applyArguments(); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); PrometheusQueryRequest prometheusQueryRequest = prometheusMetricTable.getPrometheusQueryRequest(); @@ -146,70 +149,81 @@ void testArgumentsPassedByNameWithDifferentOrder() { @Test void testMixedArgumentTypes() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new 
QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(14))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(14))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Arguments should be either passed by name or position", exception.getMessage()); } @Test void testWrongArgumentsSizeWhenPassedByName() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("step", DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("step", DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Missing arguments:[endtime,starttime]", exception.getMessage()); } @Test void testWrongArgumentsSizeWhenPassedByPosition() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new 
QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument(null, DSL.literal("http_latency")), - DSL.namedArgument(null, DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument(null, DSL.literal("http_latency")), + DSL.namedArgument(null, DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Missing arguments:[endtime,step]", exception.getMessage()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java index 6fd782b417..bb7806f824 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.prometheus.functions.scan; - import static org.opensearch.sql.prometheus.constants.TestConstants.ENDTIME; import static org.opensearch.sql.prometheus.constants.TestConstants.QUERY; import static org.opensearch.sql.prometheus.constants.TestConstants.STARTTIME; @@ -22,40 +21,35 @@ public class QueryExemplarsFunctionTableScanBuilderTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; - @Mock - private LogicalProject logicalProject; + @Mock private LogicalProject logicalProject; @Test void testBuild() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder - = new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); - TableScanOperator queryExemplarsFunctionTableScanOperator - = queryExemplarsFunctionTableScanBuilder.build(); + QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder = + new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); + TableScanOperator queryExemplarsFunctionTableScanOperator = + 
queryExemplarsFunctionTableScanBuilder.build(); Assertions.assertNotNull(queryExemplarsFunctionTableScanOperator); - Assertions.assertTrue(queryExemplarsFunctionTableScanOperator - instanceof QueryExemplarsFunctionTableScanOperator); + Assertions.assertTrue( + queryExemplarsFunctionTableScanOperator instanceof QueryExemplarsFunctionTableScanOperator); } @Test void testPushProject() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder - = new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); - Assertions.assertTrue(queryExemplarsFunctionTableScanBuilder - .pushDownProject(logicalProject)); + QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder = + new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); + Assertions.assertTrue(queryExemplarsFunctionTableScanBuilder.pushDownProject(logicalProject)); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java index d4e31d4d1e..5b8cf34fc2 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java @@ -41,22 +41,21 @@ @ExtendWith(MockitoExtension.class) public class QueryExemplarsFunctionTableScanOperatorTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows void testQueryResponseIterator() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenReturn(new JSONArray(getJson("query_exemplars_result.json"))); @@ -68,24 +67,28 @@ void testQueryResponseIterator() { seriesLabelsHashMap.put("service", new ExprStringValue("bar")); seriesLabelsHashMap.put("job", new ExprStringValue("prometheus")); LinkedHashMap exemplarMap = new LinkedHashMap<>(); - exemplarMap.put("labels", new ExprTupleValue(new LinkedHashMap<>() { - { - put("traceID", new ExprStringValue("EpTxMJ40fUus7aGY")); - } - }) - ); + exemplarMap.put( + "labels", + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("traceID", new ExprStringValue("EpTxMJ40fUus7aGY")); + } + })); exemplarMap.put("timestamp", new ExprTimestampValue(Instant.ofEpochMilli(1600096945479L))); 
exemplarMap.put("value", new ExprDoubleValue(6)); List exprValueList = new ArrayList<>(); exprValueList.add(new ExprTupleValue(exemplarMap)); ExprCollectionValue exemplars = new ExprCollectionValue(exprValueList); ExprTupleValue seriesLabels = new ExprTupleValue(seriesLabelsHashMap); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() { - { - put("seriesLabels", seriesLabels); - put("exemplars", exemplars); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("seriesLabels", seriesLabels); + put("exemplars", exemplars); + } + }); assertEquals(firstRow, queryExemplarsFunctionTableScanOperator.next()); } @@ -93,15 +96,15 @@ void testQueryResponseIterator() { @Test @SneakyThrows void testEmptyQueryWithNoMatrixKeyInResultJson() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenReturn(new JSONArray(getJson("query_exemplars_empty_result.json"))); @@ -113,15 +116,15 @@ void testEmptyQueryWithNoMatrixKeyInResultJson() { @SneakyThrows void testQuerySchema() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenReturn(new JSONArray(getJson("query_exemplars_result.json"))); @@ -140,53 +143,53 @@ void testQuerySchema() { @SneakyThrows void testEmptyQueryWithException() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), 
any())) .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, queryExemplarsFunctionTableScanOperator::open); - assertEquals("Error fetching data from prometheus server: Error Message", - runtimeException.getMessage()); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, queryExemplarsFunctionTableScanOperator::open); + assertEquals( + "Error fetching data from prometheus server: Error Message", runtimeException.getMessage()); } - @Test @SneakyThrows void testExplain() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); - Assertions.assertEquals("query_exemplars(test_query, 1664767694133, 1664771294133)", + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); + Assertions.assertEquals( + "query_exemplars(test_query, 1664767694133, 1664771294133)", queryExemplarsFunctionTableScanOperator.explain()); } @Test @SneakyThrows void testClose() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); queryExemplarsFunctionTableScanOperator.close(); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java index 8532a35395..dca79d6905 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.prometheus.functions.scan; - import static org.opensearch.sql.prometheus.constants.TestConstants.ENDTIME; import static org.opensearch.sql.prometheus.constants.TestConstants.QUERY; import static org.opensearch.sql.prometheus.constants.TestConstants.STARTTIME; @@ -23,11 +22,9 @@ public class QueryRangeFunctionTableScanBuilderTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; - @Mock - private LogicalProject logicalProject; + @Mock private LogicalProject logicalProject; @Test void testBuild() { @@ -37,13 +34,13 @@ void testBuild() { 
prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder - = new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); - TableScanOperator queryRangeFunctionTableScanOperator - = queryRangeFunctionTableScanBuilder.build(); + QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder = + new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); + TableScanOperator queryRangeFunctionTableScanOperator = + queryRangeFunctionTableScanBuilder.build(); Assertions.assertNotNull(queryRangeFunctionTableScanOperator); - Assertions.assertTrue(queryRangeFunctionTableScanOperator - instanceof QueryRangeFunctionTableScanOperator); + Assertions.assertTrue( + queryRangeFunctionTableScanOperator instanceof QueryRangeFunctionTableScanOperator); } @Test @@ -54,8 +51,8 @@ void testPushProject() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder - = new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder = + new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); Assertions.assertTrue(queryRangeFunctionTableScanBuilder.pushDownProject(logicalProject)); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java index b476471153..e59a2bf7c4 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java @@ -45,8 +45,7 @@ @ExtendWith(MockitoExtension.class) class QueryRangeFunctionTableScanOperatorTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows @@ -58,41 +57,63 @@ void testQueryResponseIterator() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("query_range_result.json"))); queryRangeFunctionTableScanOperator.open(); Assertions.assertTrue(queryRangeFunctionTableScanOperator.hasNext()); - LinkedHashMap labelsMap = new LinkedHashMap<>() {{ - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - }}; - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(LABELS, new ExprTupleValue(labelsMap)); - put(TIMESTAMP, new ExprCollectionValue(Collections - .singletonList(new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); - put(VALUE, new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(1)))); - } - }); + LinkedHashMap labelsMap = + new LinkedHashMap<>() { + { + put("instance", new 
ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }; + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(LABELS, new ExprTupleValue(labelsMap)); + put( + TIMESTAMP, + new ExprCollectionValue( + Collections.singletonList( + new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); + put( + VALUE, + new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(1)))); + } + }); assertEquals(firstRow, queryRangeFunctionTableScanOperator.next()); Assertions.assertTrue(queryRangeFunctionTableScanOperator.hasNext()); - LinkedHashMap labelsMap2 = new LinkedHashMap<>() {{ - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - }}; - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(LABELS, new ExprTupleValue(labelsMap2)); - put(TIMESTAMP, new ExprCollectionValue(Collections - .singletonList(new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); - put(VALUE, new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(0)))); - } - }); + LinkedHashMap labelsMap2 = + new LinkedHashMap<>() { + { + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }; + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(LABELS, new ExprTupleValue(labelsMap2)); + put( + TIMESTAMP, + new ExprCollectionValue( + Collections.singletonList( + new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); + put( + VALUE, + new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(0)))); + } + }); assertEquals(secondRow, queryRangeFunctionTableScanOperator.next()); Assertions.assertFalse(queryRangeFunctionTableScanOperator.hasNext()); } @@ -106,16 +127,17 @@ void testEmptyQueryWithNoMatrixKeyInResultJson() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("no_matrix_query_range_result.json"))); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); assertEquals( "Unexpected Result Type: vector during Prometheus Response Parsing. 
" - + "'matrix' resultType is expected", runtimeException.getMessage()); + + "'matrix' resultType is expected", + runtimeException.getMessage()); } @Test @@ -127,8 +149,8 @@ void testQuerySchema() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("query_range_result.json"))); @@ -150,18 +172,17 @@ void testEmptyQueryWithException() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); - assertEquals("Error fetching data from prometheus server: Error Message", - runtimeException.getMessage()); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); + assertEquals( + "Error fetching data from prometheus server: Error Message", runtimeException.getMessage()); } - @Test @SneakyThrows void testExplain() { @@ -171,10 +192,11 @@ void testExplain() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); - Assertions.assertEquals("query_range(test_query, 1664767694133, 1664771294133, 14)", + Assertions.assertEquals( + "query_range(test_query, 1664767694133, 1664771294133, 14)", queryRangeFunctionTableScanOperator.explain()); } @@ -187,8 +209,8 @@ void testClose() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); queryRangeFunctionTableScanOperator.close(); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java index a1d1cef91d..33c48e2f2d 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java @@ -32,60 +32,50 @@ @ExtendWith(MockitoExtension.class) public class 
PrometheusLogicOptimizerTest { - @Mock - private Table table; + @Mock private Table table; @Test void project_filter_merge_with_relation() { assertEquals( project( - indexScan("prometheus_http_total_requests", - DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200")))) - ), + indexScan( + "prometheus_http_total_requests", + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))))), optimize( project( filter( relation("prometheus_http_total_requests", table), - DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))) - )) - ) - ); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))))))); } @Test void aggregation_merge_relation() { assertEquals( project( - indexScanAgg("prometheus_http_total_requests", ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), + indexScanAgg( + "prometheus_http_total_requests", + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), ImmutableList.of(DSL.named("code", DSL.ref("code", STRING)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), optimize( project( aggregation( relation("prometheus_http_total_requests", table), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(DSL.named("code", - DSL.ref("code", STRING)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))) - ) - ); + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(DSL.named("code", DSL.ref("code", STRING)))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))))); } - @Test void aggregation_merge_filter_relation() { assertEquals( project( - indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), ImmutableList.of(DSL.named("job", DSL.ref("job", STRING)))), DSL.named("AVG(@value)", DSL.ref("AVG(@value)", DOUBLE))), optimize( @@ -94,25 +84,16 @@ void aggregation_merge_filter_relation() { filter( relation("prometheus_http_total_requests", table), DSL.and( - DSL.equal(DSL.ref("code", STRING), - DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), - DSL.literal(stringValue("/ready/")))) - ), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(DSL.named("job", - DSL.ref("job", STRING)))), - DSL.named("AVG(@value)", DSL.ref("AVG(@value)", DOUBLE))) - ) - ); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))), + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(DSL.named("job", DSL.ref("job", STRING)))), + DSL.named("AVG(@value)", DSL.ref("AVG(@value)", DOUBLE))))); } - private LogicalPlan optimize(LogicalPlan plan) { final LogicalPlanOptimizer optimizer = PrometheusLogicalPlanOptimizerFactory.create(); return optimizer.optimize(plan); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java 
b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java index dfc9aee7dc..9add7896cf 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java @@ -37,54 +37,61 @@ @ExtendWith(MockitoExtension.class) public class PrometheusDescribeMetricRequestTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows void testGetFieldTypes() { - when(prometheusClient.getLabels(METRIC_NAME)).thenReturn(new ArrayList() {{ - add("call"); - add("code"); - } - }); - Map expected = new HashMap<>() {{ - put("call", ExprCoreType.STRING); - put("code", ExprCoreType.STRING); - put(VALUE, ExprCoreType.DOUBLE); - put(TIMESTAMP, ExprCoreType.TIMESTAMP); - }}; - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); + when(prometheusClient.getLabels(METRIC_NAME)) + .thenReturn( + new ArrayList() { + { + add("call"); + add("code"); + } + }); + Map expected = + new HashMap<>() { + { + put("call", ExprCoreType.STRING); + put("code", ExprCoreType.STRING); + put(VALUE, ExprCoreType.DOUBLE); + put(TIMESTAMP, ExprCoreType.TIMESTAMP); + } + }; + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); assertEquals(expected, prometheusDescribeMetricRequest.getFieldTypes()); verify(prometheusClient, times(1)).getLabels(METRIC_NAME); } - @Test @SneakyThrows void testGetFieldTypesWithEmptyMetricName() { - Map expected = new HashMap<>() {{ - put(VALUE, ExprCoreType.DOUBLE); - put(TIMESTAMP, ExprCoreType.TIMESTAMP); - }}; - assertThrows(NullPointerException.class, - () -> new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), - null)); + Map expected = + new HashMap<>() { + { + put(VALUE, ExprCoreType.DOUBLE); + put(TIMESTAMP, ExprCoreType.TIMESTAMP); + } + }; + assertThrows( + NullPointerException.class, + () -> + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), null)); } - @Test @SneakyThrows void testGetFieldTypesWhenException() { when(prometheusClient.getLabels(METRIC_NAME)).thenThrow(new RuntimeException("ERROR Message")); - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); - RuntimeException exception = assertThrows(RuntimeException.class, - prometheusDescribeMetricRequest::getFieldTypes); + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); + RuntimeException exception = + assertThrows(RuntimeException.class, prometheusDescribeMetricRequest::getFieldTypes); verify(prometheusClient, times(1)).getLabels(METRIC_NAME); assertEquals("ERROR Message", exception.getMessage()); } @@ -93,27 +100,30 @@ void testGetFieldTypesWhenException() { @SneakyThrows void testGetFieldTypesWhenIOException() { when(prometheusClient.getLabels(METRIC_NAME)).thenThrow(new IOException("ERROR Message")); - 
PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); - RuntimeException exception = assertThrows(RuntimeException.class, - prometheusDescribeMetricRequest::getFieldTypes); - assertEquals("Error while fetching labels for http_requests_total" - + " from prometheus: ERROR Message", exception.getMessage()); + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); + RuntimeException exception = + assertThrows(RuntimeException.class, prometheusDescribeMetricRequest::getFieldTypes); + assertEquals( + "Error while fetching labels for http_requests_total" + " from prometheus: ERROR Message", + exception.getMessage()); verify(prometheusClient, times(1)).getLabels(METRIC_NAME); } @Test @SneakyThrows void testSearch() { - when(prometheusClient.getLabels(METRIC_NAME)).thenReturn(new ArrayList<>() { - { - add("call"); - } - }); - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("test", "default"), METRIC_NAME); + when(prometheusClient.getLabels(METRIC_NAME)) + .thenReturn( + new ArrayList<>() { + { + add("call"); + } + }); + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("test", "default"), METRIC_NAME); List<ExprValue> result = prometheusDescribeMetricRequest.search(); assertEquals(3, result.size()); assertEquals(expectedRow(), result.get(0)); @@ -129,5 +139,4 @@ private ExprValue expectedRow() { valueMap.put("DATA_TYPE", stringValue(ExprCoreType.STRING.legacyTypeName().toLowerCase())); return new ExprTupleValue(valueMap); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java index bf5bb22e96..09f63463b5 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java @@ -35,45 +35,46 @@ @ExtendWith(MockitoExtension.class) public class PrometheusListMetricsRequestTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows void testSearch() { Map<String, List<MetricMetadata>> metricsResult = new HashMap<>(); - metricsResult.put("go_gc_duration_seconds", - Collections.singletonList(new MetricMetadata("summary", - "A summary of the pause duration of garbage collection cycles.", ""))); - metricsResult.put("go_goroutines", - Collections.singletonList(new MetricMetadata("gauge", - "Number of goroutines that currently exist.", ""))); + metricsResult.put( + "go_gc_duration_seconds", + Collections.singletonList( + new MetricMetadata( + "summary", "A summary of the pause duration of garbage collection cycles.", ""))); + metricsResult.put( + "go_goroutines", + Collections.singletonList( + new MetricMetadata("gauge", "Number of goroutines that currently exist.", ""))); when(prometheusClient.getAllMetrics()).thenReturn(metricsResult); - PrometheusListMetricsRequest prometheusListMetricsRequest - = new PrometheusListMetricsRequest(prometheusClient, - new DataSourceSchemaName("prometheus",
"information_schema")); + PrometheusListMetricsRequest prometheusListMetricsRequest = + new PrometheusListMetricsRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "information_schema")); List result = prometheusListMetricsRequest.search(); assertEquals(expectedRow(), result.get(0)); assertEquals(2, result.size()); verify(prometheusClient, times(1)).getAllMetrics(); } - @Test @SneakyThrows void testSearchWhenIOException() { when(prometheusClient.getAllMetrics()).thenThrow(new IOException("ERROR Message")); - PrometheusListMetricsRequest prometheusListMetricsRequest - = new PrometheusListMetricsRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "information_schema")); - RuntimeException exception = assertThrows(RuntimeException.class, - prometheusListMetricsRequest::search); - assertEquals("Error while fetching metric list for from prometheus: ERROR Message", + PrometheusListMetricsRequest prometheusListMetricsRequest = + new PrometheusListMetricsRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "information_schema")); + RuntimeException exception = + assertThrows(RuntimeException.class, prometheusListMetricsRequest::search); + assertEquals( + "Error while fetching metric list for from prometheus: ERROR Message", exception.getMessage()); verify(prometheusClient, times(1)).getAllMetrics(); } - private ExprTupleValue expectedRow() { LinkedHashMap valueMap = new LinkedHashMap<>(); valueMap.put("TABLE_CATALOG", stringValue("prometheus")); @@ -81,9 +82,8 @@ private ExprTupleValue expectedRow() { valueMap.put("TABLE_NAME", stringValue("go_gc_duration_seconds")); valueMap.put("TABLE_TYPE", stringValue("summary")); valueMap.put("UNIT", stringValue("")); - valueMap.put("REMARKS", - stringValue("A summary of the pause duration of garbage collection cycles.")); + valueMap.put( + "REMARKS", stringValue("A summary of the pause duration of garbage collection cycles.")); return new ExprTupleValue(valueMap); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java index 9c0207853c..00ddc973bc 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java @@ -44,8 +44,7 @@ @ExtendWith(MockitoExtension.class) public class PrometheusMetricScanTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows @@ -60,24 +59,30 @@ void testQueryResponseIterator() { .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put(VALUE, new ExprDoubleValue(1)); - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put(VALUE, new ExprDoubleValue(1)); + put("instance", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); 
assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put("@timestamp", new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("@value", new ExprDoubleValue(0)); - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("@timestamp", new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("@value", new ExprDoubleValue(0)); + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } @@ -85,8 +90,7 @@ void testQueryResponseIterator() { @Test @SneakyThrows void testQueryResponseIteratorWithGivenPrometheusResponseFieldNames() { - PrometheusResponseFieldNames prometheusResponseFieldNames - = new PrometheusResponseFieldNames(); + PrometheusResponseFieldNames prometheusResponseFieldNames = new PrometheusResponseFieldNames(); prometheusResponseFieldNames.setValueFieldName("count()"); prometheusResponseFieldNames.setValueType(INTEGER); prometheusResponseFieldNames.setTimestampFieldName(TIMESTAMP); @@ -101,34 +105,38 @@ void testQueryResponseIteratorWithGivenPrometheusResponseFieldNames() { .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("count()", new ExprIntegerValue(1)); - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("count()", new ExprIntegerValue(1)); + put("instance", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("count()", new ExprIntegerValue(0)); - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("count()", new ExprIntegerValue(0)); + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } - @Test @SneakyThrows void testQueryResponseIteratorWithGivenPrometheusResponseWithLongInAggType() { - PrometheusResponseFieldNames prometheusResponseFieldNames - = new PrometheusResponseFieldNames(); 
+ PrometheusResponseFieldNames prometheusResponseFieldNames = new PrometheusResponseFieldNames(); prometheusResponseFieldNames.setValueFieldName("testAgg"); prometheusResponseFieldNames.setValueType(LONG); prometheusResponseFieldNames.setTimestampFieldName(TIMESTAMP); @@ -143,24 +151,30 @@ void testQueryResponseIteratorWithGivenPrometheusResponseWithLongInAggType() { .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(1)); - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(1)); + put("instance", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(0)); - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(0)); + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } @@ -168,8 +182,7 @@ void testQueryResponseIteratorWithGivenPrometheusResponseWithLongInAggType() { @Test @SneakyThrows void testQueryResponseIteratorWithGivenPrometheusResponseWithBackQuotedFieldNames() { - PrometheusResponseFieldNames prometheusResponseFieldNames - = new PrometheusResponseFieldNames(); + PrometheusResponseFieldNames prometheusResponseFieldNames = new PrometheusResponseFieldNames(); prometheusResponseFieldNames.setValueFieldName("testAgg"); prometheusResponseFieldNames.setValueType(LONG); prometheusResponseFieldNames.setTimestampFieldName(TIMESTAMP); @@ -186,29 +199,34 @@ void testQueryResponseIteratorWithGivenPrometheusResponseWithBackQuotedFieldName .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(1)); - put("`instance`", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(1)); + 
put("`instance`", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(0)); - put("`instance`", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(0)); + put("`instance`", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } - @Test @SneakyThrows void testEmptyQueryResponseIterator() { @@ -235,11 +253,12 @@ void testEmptyQueryWithNoMatrixKeyInResultJson() { when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("no_matrix_query_range_result.json"))); - RuntimeException runtimeException - = Assertions.assertThrows(RuntimeException.class, prometheusMetricScan::open); + RuntimeException runtimeException = + Assertions.assertThrows(RuntimeException.class, prometheusMetricScan::open); assertEquals( "Unexpected Result Type: vector during Prometheus Response Parsing. " - + "'matrix' resultType is expected", runtimeException.getMessage()); + + "'matrix' resultType is expected", + runtimeException.getMessage()); } @Test @@ -253,13 +272,12 @@ void testEmptyQueryWithException() { when(prometheusClient.queryRange(any(), any(), any(), any())) .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, prometheusMetricScan::open); - assertEquals("Error fetching data from prometheus server. Error Message", - runtimeException.getMessage()); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, prometheusMetricScan::open); + assertEquals( + "Error fetching data from prometheus server. 
Error Message", runtimeException.getMessage()); } - @Test @SneakyThrows void testExplain() { @@ -273,5 +291,4 @@ void testExplain() { + "endTime=1664771294133, step=14)", prometheusMetricScan.explain()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java index d43c38fc68..8bdab9244b 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java @@ -62,15 +62,14 @@ @ExtendWith(MockitoExtension.class) class PrometheusMetricTableTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; @Test @SneakyThrows void testGetFieldTypesFromMetric() { when(client.getLabels(TestConstants.METRIC_NAME)).thenReturn(List.of("label1", "label2")); - PrometheusMetricTable prometheusMetricTable - = new PrometheusMetricTable(client, TestConstants.METRIC_NAME); + PrometheusMetricTable prometheusMetricTable = + new PrometheusMetricTable(client, TestConstants.METRIC_NAME); Map expectedFieldTypes = new HashMap<>(); expectedFieldTypes.put("label1", ExprCoreType.STRING); expectedFieldTypes.put("label2", ExprCoreType.STRING); @@ -84,7 +83,7 @@ void testGetFieldTypesFromMetric() { assertNull(prometheusMetricTable.getPrometheusQueryRequest()); assertNotNull(prometheusMetricTable.getMetricName()); - //testing Caching + // testing Caching fieldTypes = prometheusMetricTable.getFieldTypes(); assertEquals(expectedFieldTypes, fieldTypes); @@ -96,8 +95,8 @@ void testGetFieldTypesFromMetric() { @Test @SneakyThrows void testGetFieldTypesFromPrometheusQueryRequest() { - PrometheusMetricTable prometheusMetricTable - = new PrometheusMetricTable(client, new PrometheusQueryRequest()); + PrometheusMetricTable prometheusMetricTable = + new PrometheusMetricTable(client, new PrometheusQueryRequest()); Map expectedFieldTypes = new HashMap<>(); expectedFieldTypes.put(VALUE, ExprCoreType.DOUBLE); expectedFieldTypes.put(TIMESTAMP, ExprCoreType.TIMESTAMP); @@ -117,14 +116,17 @@ void testImplementWithBasicMetricQuery() { new PrometheusMetricTable(client, "prometheus_http_requests_total"); List finalProjectList = new ArrayList<>(); finalProjectList.add(named("@value", ref("@value", ExprCoreType.DOUBLE))); - PhysicalPlan plan = prometheusMetricTable.implement( - project(relation("prometheus_http_requests_total", prometheusMetricTable), - finalProjectList, null)); + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + relation("prometheus_http_requests_total", prometheusMetricTable), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE), outputFields); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); PrometheusMetricScan prometheusMetricScan = @@ -133,7 +135,6 @@ void testImplementWithBasicMetricQuery() { assertEquals(3600 / 250 + "s", prometheusMetricScan.getRequest().getStep()); } - @Test void testImplementPrometheusQueryWithStatsQueryAndNoFilter() { @@ -141,16 +142,23 @@ void testImplementPrometheusQueryWithStatsQueryAndNoFilter() { new 
PrometheusMetricTable(client, "prometheus_http_total_requests"); // IndexScanAgg without Filter - PhysicalPlan plan = prometheusMetricTable.implement( - filter( - indexScanAgg("prometheus_http_total_requests", ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("code", DSL.ref("code", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + filter( + indexScanAgg( + "prometheus_http_total_requests", + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("code", DSL.ref("code", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); assertTrue(plan.getChild().get(0) instanceof PrometheusMetricScan); PrometheusQueryRequest prometheusQueryRequest = @@ -166,28 +174,31 @@ void testImplementPrometheusQueryWithStatsQueryAndFilter() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - // IndexScanAgg with Filter - PhysicalPlan plan = prometheusMetricTable.implement( - indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s"))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s"))))); assertTrue(plan instanceof PrometheusMetricScan); PrometheusQueryRequest prometheusQueryRequest = ((PrometheusMetricScan) plan).getRequest(); assertEquals( "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", prometheusQueryRequest.getPromQl()); - } - @Test void testImplementPrometheusQueryWithStatsQueryAndFilterAndProject() { @@ -198,77 +209,99 @@ void testImplementPrometheusQueryWithStatsQueryAndFilterAndProject() { List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - 
ImmutableList.of(DSL.named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + DSL.named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals(request.getStep(), "40s"); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testTimeRangeResolver() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - - //Both endTime and startTime are set. + // Both endTime and startTime are set. 
List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -278,40 +311,51 @@ void testTimeRangeResolverWithOutEndTimeInFilter() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - - //Only endTime is set. + // Only endTime is set. 
List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -321,78 +365,95 @@ void testTimeRangeResolverWithOutStartTimeInFilter() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - - //Both endTime and startTime are set. + // Both endTime and startTime are set. 
List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testSpanResolverWithoutSpanExpression() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), DSL.and( DSL.equal(DSL.ref("handler", STRING), 
DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), + fromObjectValue( + dateFormat.format(new Date(startTime)), ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), null), - finalProjectList, null); - RuntimeException runtimeException - = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - Assertions.assertEquals("Prometheus Catalog doesn't support " - + "aggregations without span expression", + finalProjectList, + null); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + Assertions.assertEquals( + "Prometheus Catalog doesn't support " + "aggregations without span expression", runtimeException.getMessage()); } @@ -402,34 +463,41 @@ void testSpanResolverWithEmptyGroupByList() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of()), - finalProjectList, null); - RuntimeException runtimeException - = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - Assertions.assertEquals("Prometheus Catalog doesn't support " - + "aggregations without span expression", + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of()), + finalProjectList, + null); + RuntimeException 
runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + Assertions.assertEquals( + "Prometheus Catalog doesn't support " + "aggregations without span expression", runtimeException.getMessage()); } @@ -439,44 +507,58 @@ void testSpanResolverWithSpanExpression() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -486,35 
+568,45 @@ void testExpressionWithMissingTimeUnitInSpanExpression() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - LogicalPlan logicalPlan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + LogicalPlan logicalPlan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), DSL.and( DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), + fromObjectValue( + dateFormat.format(new Date(startTime)), ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "")))), - finalProjectList, null); + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal(40), "")))), + finalProjectList, + null); RuntimeException exception = - Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(logicalPlan)); + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(logicalPlan)); assertEquals("Missing TimeUnit in the span expression", exception.getMessage()); } @@ -524,44 +616,57 @@ void testPrometheusQueryWithOnlySpanExpressionInGroupByList() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - 
fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of( - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg (avg_over_time" + assertEquals( + "avg (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -571,44 +676,57 @@ void testStatsWithNoGroupByList() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("span", - 
DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg (avg_over_time" + assertEquals( + "avg (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -617,9 +735,11 @@ void testImplementWithUnexpectedLogicalNode() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); LogicalPlan plan = project(testLogicalPlanNode()); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - assertEquals("unexpected plan node type class" + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + assertEquals( + "unexpected plan node type class" + " org.opensearch.sql.prometheus.utils.LogicalPlanUtils$TestLogicalPlan", runtimeException.getMessage()); } @@ -629,37 +749,44 @@ void testMultipleAggregationsThrowsRuntimeException() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER))), - named("SUM(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING))))); - - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - assertEquals("Prometheus Catalog doesn't multiple aggregations in stats command", + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), 
DSL.literal(stringValue("/ready/")))), + ImmutableList.of( + named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER))), + named("SUM(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(named("job", DSL.ref("job", STRING))))); + + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + assertEquals( + "Prometheus Catalog doesn't multiple aggregations in stats command", runtimeException.getMessage()); } - @Test void testUnSupportedAggregation() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("VAR_SAMP(@value)", - DSL.varSamp(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING))))); - - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of( + named("VAR_SAMP(@value)", DSL.varSamp(DSL.ref("@value", INTEGER)))), + ImmutableList.of(named("job", DSL.ref("job", STRING))))); + + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); assertTrue(runtimeException.getMessage().contains("Prometheus Catalog only supports")); } @@ -667,13 +794,16 @@ void testUnSupportedAggregation() { void testImplementWithORConditionInWhereClause() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan plan = indexScan("prometheus_http_total_requests", - DSL.or(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); - RuntimeException exception - = assertThrows(RuntimeException.class, () -> prometheusMetricTable.implement(plan)); - assertEquals("Prometheus Datasource doesn't support or in where command.", - exception.getMessage()); + LogicalPlan plan = + indexScan( + "prometheus_http_total_requests", + DSL.or( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); + RuntimeException exception = + assertThrows(RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + assertEquals( + "Prometheus Datasource doesn't support or in where command.", exception.getMessage()); } @Test @@ -683,21 +813,26 @@ void testImplementWithRelationAndFilter() { finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))), - finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + 
"prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))), + finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}", - request.getPromQl()); + assertEquals( + "prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -710,27 +845,30 @@ void testImplementWithRelationAndTimestampFilter() { Long endTime = new Date(System.currentTimeMillis()).getTime(); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))) - ), finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))), + finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests", - request.getPromQl()); + assertEquals("prometheus_http_total_requests", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testImplementWithRelationAndTimestampLTFilter() { List finalProjectList = new ArrayList<>(); @@ -740,27 +878,30 @@ void testImplementWithRelationAndTimestampLTFilter() { Long endTime = new Date(System.currentTimeMillis()).getTime(); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.less(DSL.ref("@timestamp", 
ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))) - ), finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.less( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))), + finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests", - request.getPromQl()); + assertEquals("prometheus_http_total_requests", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testImplementWithRelationAndTimestampGTFilter() { List finalProjectList = new ArrayList<>(); @@ -770,23 +911,27 @@ void testImplementWithRelationAndTimestampGTFilter() { Long endTime = new Date(System.currentTimeMillis()).getTime(); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.greater(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))) - ), finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.greater( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))), + finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests", - request.getPromQl()); + assertEquals("prometheus_http_total_requests", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -796,10 +941,9 @@ void testOptimize() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, prometheusQueryRequest); List finalProjectList = new ArrayList<>(); - LogicalPlan inputPlan = 
project(relation("query_range", prometheusMetricTable), - finalProjectList, null); - LogicalPlan optimizedPlan = prometheusMetricTable.optimize( - inputPlan); + LogicalPlan inputPlan = + project(relation("query_range", prometheusMetricTable), finalProjectList, null); + LogicalPlan optimizedPlan = prometheusMetricTable.optimize(inputPlan); assertEquals(inputPlan, optimizedPlan); } @@ -810,7 +954,8 @@ void testUnsupportedOperation() { new PrometheusMetricTable(client, prometheusQueryRequest); assertThrows(UnsupportedOperationException.class, prometheusMetricTable::exists); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> prometheusMetricTable.create(Collections.emptyMap())); } @@ -820,25 +965,29 @@ void testImplementPrometheusQueryWithBackQuotedFieldNamesInStatsQuery() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - // IndexScanAgg with Filter - PhysicalPlan plan = prometheusMetricTable.implement( - indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("`job`", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s"))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("`job`", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s"))))); assertTrue(plan instanceof PrometheusMetricScan); PrometheusQueryRequest prometheusQueryRequest = ((PrometheusMetricScan) plan).getRequest(); assertEquals( "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", prometheusQueryRequest.getPromQl()); - } @Test @@ -848,14 +997,16 @@ void testImplementPrometheusQueryWithFilterQuery() { new PrometheusMetricTable(client, "prometheus_http_total_requests"); // IndexScanAgg without Filter - PhysicalPlan plan = prometheusMetricTable.implement( - indexScan("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + indexScan( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); assertTrue(plan instanceof PrometheusMetricScan); - PrometheusQueryRequest prometheusQueryRequest = - ((PrometheusMetricScan) plan).getRequest(); + PrometheusQueryRequest prometheusQueryRequest = ((PrometheusMetricScan) plan).getRequest(); assertEquals( "prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}", prometheusQueryRequest.getPromQl()); @@ -867,15 +1018,22 @@ void testImplementPrometheusQueryWithUnsupportedFilterQuery() { PrometheusMetricTable prometheusMetricTable = new 
PrometheusMetricTable(client, "prometheus_http_total_requests"); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(indexScan("prometheus_http_total_requests", - DSL.and(DSL.lte(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))))); - assertEquals("Prometheus Datasource doesn't support <= in where command.", - exception.getMessage()); + RuntimeException exception = + assertThrows( + RuntimeException.class, + () -> + prometheusMetricTable.implement( + indexScan( + "prometheus_http_total_requests", + DSL.and( + DSL.lte(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal( + DSL.ref("handler", STRING), + DSL.literal(stringValue("/ready/"))))))); + assertEquals( + "Prometheus Datasource doesn't support <= in where command.", exception.getMessage()); } - @Test void testCreateScanBuilderWithQueryRangeTableFunction() { PrometheusQueryRequest prometheusQueryRequest = new PrometheusQueryRequest(); @@ -895,5 +1053,4 @@ void testCreateScanBuilderWithPPLQuery() { TableScanBuilder tableScanBuilder = prometheusMetricTable.createScanBuilder(); Assertions.assertNull(tableScanBuilder); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java index 4e8d470373..b925fe6538 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java @@ -29,8 +29,7 @@ @ExtendWith(MockitoExtension.class) class PrometheusStorageEngineTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; @Test public void getTable() { @@ -43,15 +42,12 @@ public void getTable() { @Test public void getFunctions() { PrometheusStorageEngine engine = new PrometheusStorageEngine(client); - Collection functionResolverCollection - = engine.getFunctions(); + Collection functionResolverCollection = engine.getFunctions(); assertNotNull(functionResolverCollection); assertEquals(2, functionResolverCollection.size()); Iterator iterator = functionResolverCollection.iterator(); - assertTrue( - iterator.next() instanceof QueryRangeTableFunctionResolver); - assertTrue( - iterator.next() instanceof QueryExemplarsTableFunctionResolver); + assertTrue(iterator.next() instanceof QueryRangeTableFunctionResolver); + assertTrue(iterator.next() instanceof QueryExemplarsTableFunctionResolver); } @Test @@ -65,8 +61,8 @@ public void getSystemTable() { @Test public void getSystemTableForAllTablesInfo() { PrometheusStorageEngine engine = new PrometheusStorageEngine(client); - Table table - = engine.getTable(new DataSourceSchemaName("prometheus", "information_schema"), "tables"); + Table table = + engine.getTable(new DataSourceSchemaName("prometheus", "information_schema"), "tables"); assertNotNull(table); assertTrue(table instanceof PrometheusSystemTable); } @@ -74,10 +70,12 @@ public void getSystemTableForAllTablesInfo() { @Test public void getSystemTableWithWrongInformationSchemaTable() { PrometheusStorageEngine engine = new PrometheusStorageEngine(client); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> engine.getTable(new DataSourceSchemaName("prometheus", "information_schema"), - "test")); + SemanticCheckException exception = + 
assertThrows( + SemanticCheckException.class, + () -> + engine.getTable( + new DataSourceSchemaName("prometheus", "information_schema"), "test")); assertEquals("Information Schema doesn't contain test table", exception.getMessage()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java index c566ccdeb4..c2e8e5325a 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java @@ -26,8 +26,7 @@ @ExtendWith(MockitoExtension.class) public class PrometheusStorageFactoryTest { - @Mock - private Settings settings; + @Mock private Settings settings; @Test void testGetConnectorType() { @@ -46,8 +45,7 @@ void testGetStorageEngineWithBasicAuth() { properties.put("prometheus.auth.type", "basicauth"); properties.put("prometheus.auth.username", "admin"); properties.put("prometheus.auth.password", "admin"); - StorageEngine storageEngine - = prometheusStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = prometheusStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine instanceof PrometheusStorageEngine); } @@ -62,12 +60,10 @@ void testGetStorageEngineWithAWSSigV4Auth() { properties.put("prometheus.auth.region", "us-east-1"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - StorageEngine storageEngine - = prometheusStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = prometheusStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine instanceof PrometheusStorageEngine); } - @Test @SneakyThrows void testGetStorageEngineWithMissingURI() { @@ -77,10 +73,12 @@ void testGetStorageEngineWithMissingURI() { properties.put("prometheus.auth.region", "us-east-1"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Missing [prometheus.uri] fields " - + "in the Prometheus connector properties.", + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "Missing [prometheus.uri] fields " + "in the Prometheus connector properties.", exception.getMessage()); } @@ -93,14 +91,15 @@ void testGetStorageEngineWithMissingRegionInAWS() { properties.put("prometheus.auth.type", "awssigv4"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Missing [prometheus.auth.region] fields in the " - + "Prometheus connector properties.", + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "Missing [prometheus.auth.region] fields in the " + "Prometheus connector properties.", exception.getMessage()); } - 
@Test @SneakyThrows void testGetStorageEngineWithLongConfigProperties() { @@ -110,9 +109,12 @@ void testGetStorageEngineWithLongConfigProperties() { properties.put("prometheus.auth.type", "awssigv4"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Missing [prometheus.auth.region] fields in the " + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "Missing [prometheus.auth.region] fields in the " + "Prometheus connector properties." + "Fields [prometheus.uri] exceeds more than 1000 characters.", exception.getMessage()); @@ -129,13 +131,14 @@ void testGetStorageEngineWithWrongAuthType() { properties.put("prometheus.auth.region", "us-east-1"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("AUTH Type : random is not supported with Prometheus Connector", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "AUTH Type : random is not supported with Prometheus Connector", exception.getMessage()); } - @Test @SneakyThrows void testGetStorageEngineWithNONEAuthType() { @@ -143,8 +146,7 @@ void testGetStorageEngineWithNONEAuthType() { PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); HashMap properties = new HashMap<>(); properties.put("prometheus.uri", "https://test.com"); - StorageEngine storageEngine - = prometheusStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = prometheusStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine instanceof PrometheusStorageEngine); } @@ -157,8 +159,9 @@ void testGetStorageEngineWithInvalidURISyntax() { properties.put("prometheus.auth.type", "basicauth"); properties.put("prometheus.auth.username", "admin"); properties.put("prometheus.auth.password", "admin"); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.getStorageEngine(properties)); Assertions.assertTrue( exception.getMessage().contains("Invalid URI in prometheus properties: ")); } @@ -213,10 +216,13 @@ void createDataSourceWithInvalidHostname() { metadata.setProperties(properties); PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.createDataSource(metadata)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.createDataSource(metadata)); Assertions.assertTrue( - exception.getMessage().contains("Invalid hostname in the uri: http://dummyprometheus:9090")); + exception + .getMessage() + 
.contains("Invalid hostname in the uri: http://dummyprometheus:9090")); } @Test @@ -233,8 +239,9 @@ void createDataSourceWithInvalidIp() { metadata.setProperties(properties); PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.createDataSource(metadata)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.createDataSource(metadata)); Assertions.assertTrue( exception.getMessage().contains("Invalid hostname in the uri: http://231.54.11.987:9090")); } @@ -255,11 +262,15 @@ void createDataSourceWithHostnameNotMatchingWithAllowHostsConfig() { metadata.setProperties(properties); PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.createDataSource(metadata)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.createDataSource(metadata)); Assertions.assertTrue( - exception.getMessage().contains("Disallowed hostname in the uri: http://localhost.com:9090. " - + "Validate with plugins.query.datasources.uri.allowhosts config")); + exception + .getMessage() + .contains( + "Disallowed hostname in the uri: http://localhost.com:9090. " + + "Validate with plugins.query.datasources.uri.allowhosts config")); } @Test @@ -279,5 +290,4 @@ void createDataSourceSuccessWithHostnameRestrictions() { DataSource dataSource = new PrometheusStorageFactory(settings).createDataSource(metadata); Assertions.assertTrue(dataSource.getStorageEngine() instanceof PrometheusStorageEngine); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java index 19876d398d..7f49de981a 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java @@ -30,14 +30,12 @@ @ExtendWith(MockitoExtension.class) class QueryExemplarsTableTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; @Test @SneakyThrows void testGetFieldTypes() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); @@ -50,8 +48,7 @@ void testGetFieldTypes() { @Test void testImplementWithBasicMetricQuery() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); @@ -67,8 +64,7 @@ void testImplementWithBasicMetricQuery() { @Test void testCreateScanBuilderWithQueryRangeTableFunction() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); 
exemplarsRequest.setEndTime(ENDTIME); @@ -77,5 +73,4 @@ void testCreateScanBuilderWithQueryRangeTableFunction() { Assertions.assertNotNull(tableScanBuilder); Assertions.assertTrue(tableScanBuilder instanceof QueryExemplarsFunctionTableScanBuilder); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java index 37e24a56b5..397b7146f7 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java @@ -18,9 +18,11 @@ public class StepParameterResolverTest { @Test void testNullChecks() { StepParameterResolver stepParameterResolver = new StepParameterResolver(); - Assertions.assertThrows(NullPointerException.class, + Assertions.assertThrows( + NullPointerException.class, () -> stepParameterResolver.resolve(null, new Date().getTime(), Collections.emptyList())); - Assertions.assertThrows(NullPointerException.class, + Assertions.assertThrows( + NullPointerException.class, () -> stepParameterResolver.resolve(new Date().getTime(), null, Collections.emptyList())); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java index 73839e2152..6a280b7d98 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java @@ -21,9 +21,11 @@ public class TimeRangeParametersResolverTest { @Test void testTimeRangeParametersWithoutTimestampFilter() { TimeRangeParametersResolver timeRangeParametersResolver = new TimeRangeParametersResolver(); - Pair result = timeRangeParametersResolver.resolve( - DSL.and(DSL.less(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); + Pair result = + timeRangeParametersResolver.resolve( + DSL.and( + DSL.less(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); Assertions.assertNotNull(result); Assertions.assertEquals(3600, result.getSecond() - result.getFirst()); } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java index 0d7ec4e2cc..ea299b87de 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java @@ -22,8 +22,7 @@ @ExtendWith(MockitoExtension.class) public class PrometheusSystemTableScanTest { - @Mock - private PrometheusSystemRequest request; + @Mock private PrometheusSystemRequest request; @Test public void queryData() { diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java index 
0721f82c07..7022ca9657 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java @@ -35,51 +35,41 @@ @ExtendWith(MockitoExtension.class) public class PrometheusSystemTableTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; - @Mock - private Table table; + @Mock private Table table; @Test void testGetFieldTypesOfMetaTable() { - PrometheusSystemTable systemIndex = new PrometheusSystemTable(client, - new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); + PrometheusSystemTable systemIndex = + new PrometheusSystemTable( + client, new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("TABLE_CATALOG", STRING) - )); - assertThat(fieldTypes, anyOf( - hasEntry("UNIT", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("TABLE_CATALOG", STRING))); + assertThat(fieldTypes, anyOf(hasEntry("UNIT", STRING))); } @Test void testGetFieldTypesOfMappingTable() { - PrometheusSystemTable systemIndex = new PrometheusSystemTable(client, - new DataSourceSchemaName("prometheus", "information_schema"), mappingTable( - "test_metric")); + PrometheusSystemTable systemIndex = + new PrometheusSystemTable( + client, + new DataSourceSchemaName("prometheus", "information_schema"), + mappingTable("test_metric")); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("COLUMN_NAME", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("COLUMN_NAME", STRING))); } - - @Test void implement() { - PrometheusSystemTable systemIndex = new PrometheusSystemTable(client, - new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); + PrometheusSystemTable systemIndex = + new PrometheusSystemTable( + client, new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); NamedExpression projectExpr = named("TABLE_NAME", ref("TABLE_NAME", STRING)); - final PhysicalPlan plan = systemIndex.implement( - project( - relation(TABLE_INFO, table), - projectExpr - )); + final PhysicalPlan plan = + systemIndex.implement(project(relation(TABLE_INFO, table), projectExpr)); assertTrue(plan instanceof ProjectOperator); assertTrue(plan.getChild().get(0) instanceof PrometheusSystemTableScan); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java index 5fcebf52e6..570a987889 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java @@ -19,43 +19,36 @@ public class LogicalPlanUtils { - /** - * Build PrometheusLogicalMetricScan. - */ + /** Build PrometheusLogicalMetricScan. */ public static LogicalPlan indexScan(String metricName, Expression filter) { - return PrometheusLogicalMetricScan.builder().metricName(metricName) - .filter(filter) - .build(); + return PrometheusLogicalMetricScan.builder().metricName(metricName).filter(filter).build(); } - /** - * Build PrometheusLogicalMetricAgg. 
- */ - public static LogicalPlan indexScanAgg(String metricName, Expression filter, - List aggregators, - List groupByList) { - return PrometheusLogicalMetricAgg.builder().metricName(metricName) + /** Build PrometheusLogicalMetricAgg. */ + public static LogicalPlan indexScanAgg( + String metricName, + Expression filter, + List aggregators, + List groupByList) { + return PrometheusLogicalMetricAgg.builder() + .metricName(metricName) .filter(filter) .aggregatorList(aggregators) .groupByList(groupByList) .build(); } - /** - * Build PrometheusLogicalMetricAgg. - */ - public static LogicalPlan indexScanAgg(String metricName, - List aggregators, - List groupByList) { - return PrometheusLogicalMetricAgg.builder().metricName(metricName) + /** Build PrometheusLogicalMetricAgg. */ + public static LogicalPlan indexScanAgg( + String metricName, List aggregators, List groupByList) { + return PrometheusLogicalMetricAgg.builder() + .metricName(metricName) .aggregatorList(aggregators) .groupByList(groupByList) .build(); } - /** - * Build PrometheusLogicalMetricAgg. - */ + /** Build PrometheusLogicalMetricAgg. */ public static LogicalPlan testLogicalPlanNode() { return new TestLogicalPlan(); } @@ -71,7 +64,4 @@ public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitNode(this, null); } } - - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java index 1683858c49..a9fcc26101 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java @@ -12,6 +12,7 @@ public class TestUtils { /** * Get Json document from the files in resources folder. + * * @param filename filename. * @return String. * @throws IOException IOException. @@ -21,5 +22,4 @@ public static String getJson(String filename) throws IOException { return new String( Objects.requireNonNull(classLoader.getResourceAsStream(filename)).readAllBytes()); } - } diff --git a/protocol/build.gradle b/protocol/build.gradle index 92a1aa0917..dcec1c675b 100644 --- a/protocol/build.gradle +++ b/protocol/build.gradle @@ -43,6 +43,9 @@ dependencies { testImplementation group: 'org.mockito', name: 'mockito-junit-jupiter', version: '3.12.4' } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + configurations.all { resolutionStrategy.force "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}" } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java index 3ce1dd8875..03be0875cf 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response; import java.util.Collection; @@ -20,22 +19,18 @@ import org.opensearch.sql.executor.pagination.Cursor; /** - * Query response that encapsulates query results and isolate {@link ExprValue} - * related from formatter implementation. + * Query response that encapsulates query results and isolate {@link ExprValue} related from + * formatter implementation. 
*/ @RequiredArgsConstructor public class QueryResult implements Iterable { - @Getter - private final ExecutionEngine.Schema schema; + @Getter private final ExecutionEngine.Schema schema; - /** - * Results which are collection of expression. - */ + /** Results which are collection of expression. */ private final Collection exprValues; - @Getter - private final Cursor cursor; + @Getter private final Cursor cursor; public QueryResult(ExecutionEngine.Schema schema, Collection exprValues) { this(schema, exprValues, Cursor.None); @@ -43,6 +38,7 @@ public QueryResult(ExecutionEngine.Schema schema, Collection exprValu /** * size of results. + * * @return size of results */ public int size() { @@ -52,14 +48,18 @@ public int size() { /** * Parse column name from results. * - * @return mapping from column names to its expression type. - * note that column name could be original name or its alias if any. + * @return mapping from column names to its expression type. note that column name could be + * original name or its alias if any. */ public Map columnNameTypes() { Map colNameTypes = new LinkedHashMap<>(); - schema.getColumns().forEach(column -> colNameTypes.put( - getColumnName(column), - column.getExprType().typeName().toLowerCase(Locale.ROOT))); + schema + .getColumns() + .forEach( + column -> + colNameTypes.put( + getColumnName(column), + column.getExprType().typeName().toLowerCase(Locale.ROOT))); return colNameTypes; } @@ -78,9 +78,6 @@ private String getColumnName(Column column) { } private Object[] convertExprValuesToValues(Collection exprValues) { - return exprValues - .stream() - .map(ExprValue::value) - .toArray(Object[]::new); + return exprValues.stream().map(ExprValue::value).toArray(Object[]::new); } } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java index dfd0f91931..b781e1dbba 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java @@ -10,8 +10,8 @@ import org.opensearch.sql.protocol.response.QueryResult; /** - * A simple response formatter which contains no data. - * Supposed to use with {@link CommandPlan} only. + * A simple response formatter which contains no data. Supposed to use with {@link CommandPlan} + * only. 
*/ public class CommandResponseFormatter extends JsonResponseFormatter { diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java index 5c5b4be048..a61b54b258 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; public class CsvResponseFormatter extends FlatResponseFormatter { @@ -14,5 +13,4 @@ public CsvResponseFormatter() { public CsvResponseFormatter(boolean sanitize) { super(",", sanitize); } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java index 40848e959b..5c85e5d65b 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import com.google.gson.Gson; @@ -17,35 +16,28 @@ @UtilityClass public class ErrorFormatter { - private static final Gson PRETTY_PRINT_GSON = AccessController.doPrivileged( - (PrivilegedAction) () -> new GsonBuilder() - .setPrettyPrinting() - .disableHtmlEscaping() - .create()); - private static final Gson GSON = AccessController.doPrivileged( - (PrivilegedAction) () -> new GsonBuilder().disableHtmlEscaping().create()); - - /** - * Util method to format {@link Throwable} response to JSON string in compact printing. - */ + private static final Gson PRETTY_PRINT_GSON = + AccessController.doPrivileged( + (PrivilegedAction) + () -> new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create()); + private static final Gson GSON = + AccessController.doPrivileged( + (PrivilegedAction) () -> new GsonBuilder().disableHtmlEscaping().create()); + + /** Util method to format {@link Throwable} response to JSON string in compact printing. */ public static String compactFormat(Throwable t) { - JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), - t.getMessage()); + JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), t.getMessage()); return compactJsonify(error); } - /** - * Util method to format {@link Throwable} response to JSON string in pretty printing. - */ - public static String prettyFormat(Throwable t) { - JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), - t.getMessage()); + /** Util method to format {@link Throwable} response to JSON string in pretty printing. 
*/ + public static String prettyFormat(Throwable t) { + JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), t.getMessage()); return prettyJsonify(error); } public static String compactJsonify(Object jsonObject) { - return AccessController.doPrivileged( - (PrivilegedAction) () -> GSON.toJson(jsonObject)); + return AccessController.doPrivileged((PrivilegedAction) () -> GSON.toJson(jsonObject)); } public static String prettyJsonify(Object jsonObject) { diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java index 0575647dad..8c67d524b8 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import com.google.common.collect.ImmutableList; @@ -48,9 +47,8 @@ public String format(Throwable t) { } /** - * Sanitize methods are migrated from legacy CSV result. - * Sanitize both headers and data lines by: - * 1) Second double quote entire cell if any comma is found. + * Sanitize methods are migrated from legacy CSV result. Sanitize both headers and data lines by: + * 1) Second double quote entire cell if any comma is found. */ @Getter @RequiredArgsConstructor @@ -84,29 +82,30 @@ private List getHeaders(QueryResult response, boolean sanitize) { private List> getData(QueryResult response, boolean sanitize) { ImmutableList.Builder> dataLines = new ImmutableList.Builder<>(); - response.iterator().forEachRemaining(row -> { - ImmutableList.Builder line = new ImmutableList.Builder<>(); - // replace null values with empty string - Arrays.asList(row).forEach(val -> line.add(val == null ? "" : val.toString())); - dataLines.add(line.build()); - }); + response + .iterator() + .forEachRemaining( + row -> { + ImmutableList.Builder line = new ImmutableList.Builder<>(); + // replace null values with empty string + Arrays.asList(row).forEach(val -> line.add(val == null ? "" : val.toString())); + dataLines.add(line.build()); + }); List> result = dataLines.build(); return sanitizeData(result); } - /** - * Sanitize headers because OpenSearch allows special character present in field names. - */ + /** Sanitize headers because OpenSearch allows special character present in field names. 
*/ private List sanitizeHeaders(List headers) { if (sanitize) { return headers.stream() - .map(this::sanitizeCell) - .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) - .collect(Collectors.toList()); + .map(this::sanitizeCell) + .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) + .collect(Collectors.toList()); } else { return headers.stream() - .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) - .collect(Collectors.toList()); + .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) + .collect(Collectors.toList()); } } @@ -114,14 +113,16 @@ private List> sanitizeData(List> lines) { List> result = new ArrayList<>(); if (sanitize) { for (List line : lines) { - result.add(line.stream() + result.add( + line.stream() .map(this::sanitizeCell) .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) .collect(Collectors.toList())); } } else { for (List line : lines) { - result.add(line.stream() + result.add( + line.stream() .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) .collect(Collectors.toList())); } @@ -138,13 +139,11 @@ private String sanitizeCell(String cell) { private String quoteIfRequired(String separator, String cell) { final String quote = "\""; - return cell.contains(separator) - ? quote + cell.replaceAll("\"", "\"\"") + quote : cell; + return cell.contains(separator) ? quote + cell.replaceAll("\"", "\"\"") + quote : cell; } private boolean isStartWithSensitiveChar(String cell) { return SENSITIVE_CHAR.stream().anyMatch(cell::startsWith); } } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java index 4291c09df0..8f22a5380e 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import com.google.common.base.Strings; @@ -20,8 +19,7 @@ public enum Format { RAW("raw"), VIZ("viz"); - @Getter - private final String formatName; + @Getter private final String formatName; private static final Map ALL_FORMATS; diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java index 1ad3ffde34..8be22af532 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import java.util.List; @@ -40,9 +39,7 @@ protected Object buildJsonObject(QueryResult response) { json.datarows(fetchDataRows(response)); // Populate other fields - json.total(response.size()) - .size(response.size()) - .status(200); + json.total(response.size()).size(response.size()).status(200); if (!response.getCursor().equals(Cursor.None)) { json.cursor(response.getCursor().toString()); } @@ -54,10 +51,7 @@ protected Object buildJsonObject(QueryResult response) { public String format(Throwable t) { int status = getStatus(t); ErrorMessage message = ErrorMessageFactory.createErrorMessage(t, status); - Error error = new Error( - message.getType(), - message.getReason(), - message.getDetails()); + Error error = new Error(message.getType(), message.getReason(), message.getDetails()); 
return jsonify(new JdbcErrorResponse(error, status)); } @@ -66,8 +60,8 @@ private Column fetchColumn(Schema.Column col) { } /** - * Convert type that exists in both legacy and new engine but has different name. - * Return old type name to avoid breaking impact on client-side. + * Convert type that exists in both legacy and new engine but has different name. Return old type + * name to avoid breaking impact on client-side. */ private String convertToLegacyType(ExprType type) { return type.legacyTypeName().toLowerCase(); @@ -83,18 +77,16 @@ private Object[][] fetchDataRows(QueryResult response) { } private int getStatus(Throwable t) { - return (t instanceof SyntaxCheckException - || t instanceof QueryEngineException) ? 400 : 503; + return (t instanceof SyntaxCheckException || t instanceof QueryEngineException) ? 400 : 503; } - /** - * org.json requires these inner data classes be public (and static) - */ + /** org.json requires these inner data classes be public (and static) */ @Builder @Getter public static class JdbcResponse { @Singular("column") private final List schema; + private final Object[][] datarows; private final long total; private final long size; @@ -125,5 +117,4 @@ public static class Error { private final String reason; private final String details; } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java index 810a7d0c2d..115ee77b2b 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.opensearch.sql.protocol.response.format.ErrorFormatter.compactFormat; @@ -24,16 +23,13 @@ @RequiredArgsConstructor public abstract class JsonResponseFormatter implements ResponseFormatter { - /** - * JSON format styles: pretty format or compact format without indent and space. - */ + /** JSON format styles: pretty format or compact format without indent and space. */ public enum Style { - PRETTY, COMPACT + PRETTY, + COMPACT } - /** - * JSON format style. - */ + /** JSON format style. */ private final Style style; public static final String CONTENT_TYPE = "application/json; charset=UTF-8"; @@ -45,8 +41,8 @@ public String format(R response) { @Override public String format(Throwable t) { - return AccessController.doPrivileged((PrivilegedAction) () -> - (style == PRETTY) ? prettyFormat(t) : compactFormat(t)); + return AccessController.doPrivileged( + (PrivilegedAction) () -> (style == PRETTY) ? prettyFormat(t) : compactFormat(t)); } public String contentType() { @@ -62,7 +58,8 @@ public String contentType() { protected abstract Object buildJsonObject(R response); protected String jsonify(Object jsonObject) { - return AccessController.doPrivileged((PrivilegedAction) () -> - (style == PRETTY) ? prettyJsonify(jsonObject) : compactJsonify(jsonObject)); + return AccessController.doPrivileged( + (PrivilegedAction) + () -> (style == PRETTY) ? 
prettyJsonify(jsonObject) : compactJsonify(jsonObject)); } } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java index 8fe88b2f95..3b64be7062 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java @@ -3,16 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; -/** - * Response formatter to format response to csv or raw format. - */ -//@RequiredArgsConstructor +/** Response formatter to format response to csv or raw format. */ public class RawResponseFormatter extends FlatResponseFormatter { public RawResponseFormatter() { super("|", false); } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java index 6d9cc093c5..6738cfbc9c 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java @@ -3,12 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; -/** - * Response formatter to format response to different formats. - */ +/** Response formatter to format response to different formats. */ public interface ResponseFormatter { /** @@ -33,5 +30,4 @@ public interface ResponseFormatter { * @return string */ String contentType(); - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java index ad705ccafa..c00174dc9f 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import java.util.List; @@ -43,8 +42,7 @@ public SimpleJsonResponseFormatter(Style style) { public Object buildJsonObject(QueryResult response) { JsonResponse.JsonResponseBuilder json = JsonResponse.builder(); - json.total(response.size()) - .size(response.size()); + json.total(response.size()).size(response.size()); response.columnNameTypes().forEach((name, type) -> json.column(new Column(name, type))); @@ -61,9 +59,7 @@ private Object[][] fetchDataRows(QueryResult response) { return rows; } - /** - * org.json requires these inner data classes be public (and static) - */ + /** org.json requires these inner data classes be public (and static) */ @Builder @Getter public static class JsonResponse { @@ -82,5 +78,4 @@ public static class Column { private final String name; private final String type; } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java index 7e971c9099..d5d220dd8d 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java +++ 
b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java @@ -72,21 +72,20 @@ protected Object buildJsonObject(QueryResult response) { public String format(Throwable t) { int status = getStatus(t); ErrorMessage message = ErrorMessageFactory.createErrorMessage(t, status); - VisualizationResponseFormatter.Error error = new Error( - message.getType(), - message.getReason(), - message.getDetails()); + VisualizationResponseFormatter.Error error = + new Error(message.getType(), message.getReason(), message.getDetails()); return jsonify(new VisualizationErrorResponse(error, status)); } private int getStatus(Throwable t) { - return (t instanceof SyntaxCheckException - || t instanceof QueryEngineException) ? 400 : 503; + return (t instanceof SyntaxCheckException || t instanceof QueryEngineException) ? 400 : 503; } private Map> fetchData(QueryResult response) { Map> columnMap = new LinkedHashMap<>(); - response.getSchema().getColumns() + response + .getSchema() + .getColumns() .forEach(column -> columnMap.put(column.getName(), new LinkedList<>())); for (Object[] dataRow : response) { @@ -107,16 +106,17 @@ private Metadata constructMetadata(QueryResult response) { private List fetchFields(QueryResult response) { List columns = response.getSchema().getColumns(); ImmutableList.Builder fields = ImmutableList.builder(); - columns.forEach(column -> { - Field field = new Field(column.getName(), convertToLegacyType(column.getExprType())); - fields.add(field); - }); + columns.forEach( + column -> { + Field field = new Field(column.getName(), convertToLegacyType(column.getExprType())); + fields.add(field); + }); return fields.build(); } /** - * Convert type that exists in both legacy and new engine but has different name. - * Return old type name to avoid breaking impact on client-side. + * Convert type that exists in both legacy and new engine but has different name. Return old type + * name to avoid breaking impact on client-side. 
*/ private String convertToLegacyType(ExprType type) { return type.legacyTypeName().toLowerCase(); diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java index 4c58e189b8..e03169e9f8 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response; import static org.junit.jupiter.api.Assertions.assertArrayEquals; @@ -23,86 +22,77 @@ class QueryResultTest { - private ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", null, STRING), - new ExecutionEngine.Schema.Column("age", null, INTEGER))); - + private ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", null, STRING), + new ExecutionEngine.Schema.Column("age", null, INTEGER))); @Test void size() { - QueryResult response = new QueryResult( - schema, - Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Allen", "age", 30)), - tupleValue(ImmutableMap.of("name", "Smith", "age", 40)) - ), Cursor.None); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Allen", "age", 30)), + tupleValue(ImmutableMap.of("name", "Smith", "age", 40))), + Cursor.None); assertEquals(3, response.size()); } @Test void columnNameTypes() { - QueryResult response = new QueryResult( - schema, - Collections.singletonList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)) - ), Cursor.None); + QueryResult response = + new QueryResult( + schema, + Collections.singletonList(tupleValue(ImmutableMap.of("name", "John", "age", 20))), + Cursor.None); - assertEquals( - ImmutableMap.of("name", "string", "age", "integer"), - response.columnNameTypes() - ); + assertEquals(ImmutableMap.of("name", "string", "age", "integer"), response.columnNameTypes()); } @Test void columnNameTypesWithAlias() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "n", STRING))); - QueryResult response = new QueryResult( - schema, - Collections.singletonList(tupleValue(ImmutableMap.of("n", "John"))), - Cursor.None); - - assertEquals( - ImmutableMap.of("n", "string"), - response.columnNameTypes() - ); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("name", "n", STRING))); + QueryResult response = + new QueryResult( + schema, + Collections.singletonList(tupleValue(ImmutableMap.of("n", "John"))), + Cursor.None); + + assertEquals(ImmutableMap.of("n", "string"), response.columnNameTypes()); } @Test void columnNameTypesFromEmptyExprValues() { - QueryResult response = new QueryResult( - schema, - Collections.emptyList(), Cursor.None); - assertEquals( - ImmutableMap.of("name", "string", "age", "integer"), - response.columnNameTypes() - ); + QueryResult response = new QueryResult(schema, Collections.emptyList(), Cursor.None); + assertEquals(ImmutableMap.of("name", "string", "age", "integer"), response.columnNameTypes()); } @Test void columnNameTypesFromExprValuesWithMissing() { - QueryResult response = new QueryResult( - 
schema, - Arrays.asList( - tupleValue(ImmutableMap.of("name", "John")), - tupleValue(ImmutableMap.of("name", "John", "age", 20)) - )); - - assertEquals( - ImmutableMap.of("name", "string", "age", "integer"), - response.columnNameTypes() - ); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John")), + tupleValue(ImmutableMap.of("name", "John", "age", 20)))); + + assertEquals(ImmutableMap.of("name", "string", "age", "integer"), response.columnNameTypes()); } @Test void iterate() { - QueryResult response = new QueryResult( - schema, - Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Allen", "age", 30)) - ), Cursor.None); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Allen", "age", 30))), + Cursor.None); int i = 0; for (Object[] objects : response) { @@ -116,5 +106,4 @@ void iterate() { i++; } } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java index 85efbab369..8e86e47754 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java @@ -29,32 +29,34 @@ public class CommandResponseFormatterTest { @Test public void produces_always_same_output_for_any_query_response() { var formatter = new CommandResponseFormatter(); - assertEquals(formatter.format(mock(QueryResult.class)), - formatter.format(mock(QueryResult.class))); + assertEquals( + formatter.format(mock(QueryResult.class)), formatter.format(mock(QueryResult.class))); - QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("address", "address", OpenSearchTextType.of()), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))), - ImmutableList.of( - tupleValue(ImmutableMap.builder() - .put("name", "John") - .put("address", "Seattle") - .put("age", 20) - .build())), - new Cursor("test_cursor")); + QueryResult response = + new QueryResult( + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column( + "address", "address", OpenSearchTextType.of()), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue( + ImmutableMap.builder() + .put("name", "John") + .put("address", "Seattle") + .put("age", 20) + .build())), + new Cursor("test_cursor")); - assertEquals("{\n" - + " \"succeeded\": true\n" - + "}", - formatter.format(response)); + assertEquals("{\n \"succeeded\": true\n}", formatter.format(response)); } @Test public void formats_error_as_default_formatter() { var exception = new Exception("pewpew", new RuntimeException("meow meow")); - assertEquals(new JdbcResponseFormatter(PRETTY).format(exception), + assertEquals( + new JdbcResponseFormatter(PRETTY).format(exception), new CommandResponseFormatter().format(exception)); } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java 
index 82b4f372b3..d27ac72373 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -24,20 +23,23 @@ import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.protocol.response.QueryResult; -/** - * Unit test for {@link CsvResponseFormatter}. - */ +/** Unit test for {@link CsvResponseFormatter}. */ public class CsvResponseFormatterTest { private static final CsvResponseFormatter formatter = new CsvResponseFormatter(); @Test void formatResponse() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); CsvResponseFormatter formatter = new CsvResponseFormatter(); String expected = "name,age%nJohn,20%nSmith,30"; assertEquals(format(expected), formatter.format(response)); @@ -45,49 +47,69 @@ void formatResponse() { @Test void sanitizeHeaders() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("=firstname", null, STRING), - new ExecutionEngine.Schema.Column("+lastname", null, STRING), - new ExecutionEngine.Schema.Column("-city", null, STRING), - new ExecutionEngine.Schema.Column("@age", null, INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of( - "=firstname", "John", "+lastname", "Smith", "-city", "Seattle", "@age", 20)))); - String expected = "'=firstname,'+lastname,'-city,'@age%n" - + "John,Smith,Seattle,20"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("=firstname", null, STRING), + new ExecutionEngine.Schema.Column("+lastname", null, STRING), + new ExecutionEngine.Schema.Column("-city", null, STRING), + new ExecutionEngine.Schema.Column("@age", null, INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue( + ImmutableMap.of( + "=firstname", + "John", + "+lastname", + "Smith", + "-city", + "Seattle", + "@age", + 20)))); + String expected = "'=firstname,'+lastname,'-city,'@age%nJohn,Smith,Seattle,20"; assertEquals(format(expected), formatter.format(response)); } @Test void sanitizeData() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "Seattle")), - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", "+Seattle")), - tupleValue(ImmutableMap.of("city", "-Seattle")), - 
tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "Seattle=")))); - String expected = "city%n" - + "Seattle%n" - + "'=Seattle%n" - + "'+Seattle%n" - + "'-Seattle%n" - + "'@Seattle%n" - + "Seattle="; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "Seattle")), + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", "+Seattle")), + tupleValue(ImmutableMap.of("city", "-Seattle")), + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "Seattle=")))); + String expected = + "city%n" + + "Seattle%n" + + "'=Seattle%n" + + "'+Seattle%n" + + "'-Seattle%n" + + "'@Seattle%n" + + "Seattle="; assertEquals(format(expected), formatter.format(response)); } @Test void quoteIfRequired() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("na,me", "na,me", STRING), - new ExecutionEngine.Schema.Column(",,age", ",,age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("na,me", "John,Smith", ",,age", "30,,,")))); - String expected = "\"na,me\",\",,age\"%n" - + "\"John,Smith\",\"30,,,\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("na,me", "na,me", STRING), + new ExecutionEngine.Schema.Column(",,age", ",,age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList(tupleValue(ImmutableMap.of("na,me", "John,Smith", ",,age", "30,,,")))); + String expected = "\"na,me\",\",,age\"%n\"John,Smith\",\"30,,,\""; assertEquals(format(expected), formatter.format(response)); } @@ -102,32 +124,36 @@ void formatError() { @Test void escapeSanitize() { CsvResponseFormatter escapeFormatter = new CsvResponseFormatter(false); - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", ",,Seattle")))); - String expected = "city%n" - + "=Seattle%n" - + "\",,Seattle\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", ",,Seattle")))); + String expected = "city%n=Seattle%n\",,Seattle\""; assertEquals(format(expected), escapeFormatter.format(response)); } @Test void replaceNullValues() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John","city", "Seattle")), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); - String expected = "name,city%n" - + "John,Seattle%n" - + ",Seattle%n" - + "John,"; + 
ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "city", "Seattle")), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); + String expected = "name,city%nJohn,Seattle%n,Seattle%nJohn,"; assertEquals(format(expected), formatter.format(response)); } @@ -135,5 +161,4 @@ void replaceNullValues() { void testContentType() { assertEquals(formatter.contentType(), CONTENT_TYPE); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java index e0e4355a24..7293048916 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -13,9 +12,7 @@ import java.util.Optional; import org.junit.jupiter.api.Test; -/** - * Unit test for {@link Format}. - */ +/** Unit test for {@link Format}. */ public class FormatTest { @Test @@ -58,5 +55,4 @@ void unsupportedFormat() { Optional format = Format.of("notsupport"); assertFalse(format.isPresent()); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java index 9c79b1bf89..16dd1590ee 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -43,26 +42,35 @@ class JdbcResponseFormatterTest { @Test void format_response() { - QueryResult response = new QueryResult( - new Schema(ImmutableList.of( - new Column("name", "name", STRING), - new Column("address1", "address1", OpenSearchTextType.of()), - new Column("address2", "address2", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), - new Column("location", "location", STRUCT), - new Column("employer", "employer", ARRAY), - new Column("age", "age", INTEGER))), - ImmutableList.of( - tupleValue(ImmutableMap.builder() - .put("name", "John") - .put("address1", "Seattle") - .put("address2", "WA") - .put("location", ImmutableMap.of("x", "1", "y", "2")) - .put("employments", ImmutableList.of( - ImmutableMap.of("name", "Amazon"), - ImmutableMap.of("name", "AWS"))) - .put("age", 20) - .build()))); + QueryResult response = + new QueryResult( + new Schema( + ImmutableList.of( + new Column("name", "name", STRING), + new Column("address1", "address1", OpenSearchTextType.of()), + new Column( + "address2", + "address2", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + new 
Column("location", "location", STRUCT), + new Column("employer", "employer", ARRAY), + new Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue( + ImmutableMap.builder() + .put("name", "John") + .put("address1", "Seattle") + .put("address2", "WA") + .put("location", ImmutableMap.of("x", "1", "y", "2")) + .put( + "employments", + ImmutableList.of( + ImmutableMap.of("name", "Amazon"), ImmutableMap.of("name", "AWS"))) + .put("age", 20) + .build()))); assertJsonEquals( "{" @@ -76,7 +84,8 @@ void format_response() { + "]," + "\"datarows\":[" + "[\"John\",\"Seattle\",\"WA\",{\"x\":\"1\",\"y\":\"2\"}," - + "[{\"name\":\"Amazon\"}," + "{\"name\":\"AWS\"}]," + + "[{\"name\":\"Amazon\"}," + + "{\"name\":\"AWS\"}]," + "20]]," + "\"total\":1," + "\"size\":1," @@ -86,18 +95,21 @@ void format_response() { @Test void format_response_with_cursor() { - QueryResult response = new QueryResult( - new Schema(ImmutableList.of( - new Column("name", "name", STRING), - new Column("address", "address", OpenSearchTextType.of()), - new Column("age", "age", INTEGER))), - ImmutableList.of( - tupleValue(ImmutableMap.builder() - .put("name", "John") - .put("address", "Seattle") - .put("age", 20) - .build())), - new Cursor("test_cursor")); + QueryResult response = + new QueryResult( + new Schema( + ImmutableList.of( + new Column("name", "name", STRING), + new Column("address", "address", OpenSearchTextType.of()), + new Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue( + ImmutableMap.builder() + .put("name", "John") + .put("address", "Seattle") + .put("age", 20) + .build())), + new Cursor("test_cursor")); assertJsonEquals( "{" @@ -119,9 +131,9 @@ void format_response_with_cursor() { void format_response_with_missing_and_null_value() { QueryResult response = new QueryResult( - new Schema(ImmutableList.of( - new Column("name", null, STRING), - new Column("age", null, INTEGER))), + new Schema( + ImmutableList.of( + new Column("name", null, STRING), new Column("age", null, INTEGER))), Arrays.asList( ExprTupleValue.fromExprValueMap( ImmutableMap.of("name", stringValue("John"), "age", LITERAL_MISSING)), @@ -147,8 +159,7 @@ void format_client_error_response_due_to_syntax_exception() { + "\"details\":\"Invalid query syntax\"" + "}," + "\"status\":400}", - formatter.format(new SyntaxCheckException("Invalid query syntax")) - ); + formatter.format(new SyntaxCheckException("Invalid query syntax"))); } @Test @@ -161,8 +172,7 @@ void format_client_error_response_due_to_semantic_exception() { + "\"details\":\"Invalid query semantics\"" + "}," + "\"status\":400}", - formatter.format(new SemanticCheckException("Invalid query semantics")) - ); + formatter.format(new SemanticCheckException("Invalid query semantics"))); } @Test @@ -175,8 +185,7 @@ void format_server_error_response() { + "\"details\":\"Execution error\"" + "}," + "\"status\":503}", - formatter.format(new IllegalStateException("Execution error")) - ); + formatter.format(new IllegalStateException("Execution error"))); } @Test @@ -193,15 +202,12 @@ void format_server_error_response_due_to_opensearch() { + "from OpenSearch engine.\"" + "}," + "\"status\":503}", - formatter.format(new OpenSearchException("all shards failed", - new IllegalStateException("Execution error"))) - ); + formatter.format( + new OpenSearchException( + "all shards failed", new IllegalStateException("Execution error")))); } private static void assertJsonEquals(String expected, String actual) { - assertEquals( - JsonParser.parseString(expected), - JsonParser.parseString(actual)); 
+ assertEquals(JsonParser.parseString(expected), JsonParser.parseString(actual)); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java index b33a4f216a..65111bd3b9 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -24,69 +23,92 @@ import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.protocol.response.QueryResult; -/** - * Unit test for {@link FlatResponseFormatter}. - */ +/** Unit test for {@link FlatResponseFormatter}. */ public class RawResponseFormatterTest { private FlatResponseFormatter rawFormatter = new RawResponseFormatter(); @Test void formatResponse() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); String expected = "name|age%nJohn|20%nSmith|30"; assertEquals(format(expected), rawFormatter.format(response)); } @Test void sanitizeHeaders() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("=firstname", null, STRING), - new ExecutionEngine.Schema.Column("+lastname", null, STRING), - new ExecutionEngine.Schema.Column("-city", null, STRING), - new ExecutionEngine.Schema.Column("@age", null, INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of( - "=firstname", "John", "+lastname", "Smith", "-city", "Seattle", "@age", 20)))); - String expected = "=firstname|+lastname|-city|@age%n" - + "John|Smith|Seattle|20"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("=firstname", null, STRING), + new ExecutionEngine.Schema.Column("+lastname", null, STRING), + new ExecutionEngine.Schema.Column("-city", null, STRING), + new ExecutionEngine.Schema.Column("@age", null, INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue( + ImmutableMap.of( + "=firstname", + "John", + "+lastname", + "Smith", + "-city", + "Seattle", + "@age", + 20)))); + String expected = "=firstname|+lastname|-city|@age%nJohn|Smith|Seattle|20"; assertEquals(format(expected), rawFormatter.format(response)); } @Test void sanitizeData() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - 
tupleValue(ImmutableMap.of("city", "Seattle")), - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", "+Seattle")), - tupleValue(ImmutableMap.of("city", "-Seattle")), - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "Seattle=")))); - String expected = "city%n" - + "Seattle%n" - + "=Seattle%n" - + "+Seattle%n" - + "-Seattle%n" - + "@Seattle%n" - + "Seattle="; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "Seattle")), + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", "+Seattle")), + tupleValue(ImmutableMap.of("city", "-Seattle")), + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "Seattle=")))); + String expected = + "city%n" + + "Seattle%n" + + "=Seattle%n" + + "+Seattle%n" + + "-Seattle%n" + + "@Seattle%n" + + "Seattle="; assertEquals(format(expected), rawFormatter.format(response)); } @Test void quoteIfRequired() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("na|me", "na|me", STRING), - new ExecutionEngine.Schema.Column("||age", "||age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("na|me", "John|Smith", "||age", "30|||")))); - String expected = "\"na|me\"|\"||age\"%n" - + "\"John|Smith\"|\"30|||\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("na|me", "na|me", STRING), + new ExecutionEngine.Schema.Column("||age", "||age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList(tupleValue(ImmutableMap.of("na|me", "John|Smith", "||age", "30|||")))); + String expected = "\"na|me\"|\"||age\"%n\"John|Smith\"|\"30|||\""; assertEquals(format(expected), rawFormatter.format(response)); } @@ -101,59 +123,67 @@ void formatError() { @Test void escapeSanitize() { FlatResponseFormatter escapeFormatter = new RawResponseFormatter(); - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", "||Seattle")))); - String expected = "city%n" - + "=Seattle%n" - + "\"||Seattle\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", "||Seattle")))); + String expected = "city%n=Seattle%n\"||Seattle\""; assertEquals(format(expected), escapeFormatter.format(response)); } @Test void senstiveCharater() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "++Seattle")))); - String expected = "city%n" - + "@Seattle%n" - + "++Seattle"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new 
ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "++Seattle")))); + String expected = "city%n@Seattle%n++Seattle"; assertEquals(format(expected), rawFormatter.format(response)); } @Test void senstiveCharaterWithSanitize() { FlatResponseFormatter testFormater = new RawResponseFormatter(); - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "++Seattle|||")))); - String expected = "city%n" - + "@Seattle%n" - + "\"++Seattle|||\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "++Seattle|||")))); + String expected = "city%n@Seattle%n\"++Seattle|||\""; assertEquals(format(expected), testFormater.format(response)); } @Test void replaceNullValues() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John","city", "Seattle")), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); - String expected = "name|city%n" - + "John|Seattle%n" - + "|Seattle%n" - + "John|"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "city", "Seattle")), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); + String expected = "name|city%nJohn|Seattle%n|Seattle%nJohn|"; assertEquals(format(expected), rawFormatter.format(response)); } @@ -161,5 +191,4 @@ void replaceNullValues() { void testContentType() { assertEquals(rawFormatter.contentType(), CONTENT_TYPE); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java index 8b4438cf91..e5eb0f1ac7 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -25,9 +24,11 @@ class SimpleJsonResponseFormatterTest { - private final ExecutionEngine.Schema 
schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("firstname", null, STRING), - new ExecutionEngine.Schema.Column("age", null, INTEGER))); + private final ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("firstname", null, STRING), + new ExecutionEngine.Schema.Column("age", null, INTEGER))); @Test void formatResponse() { @@ -84,12 +85,12 @@ void formatResponsePretty() { @Test void formatResponseSchemaWithAlias() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("firstname", "name", STRING))); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("firstname", "name", STRING))); QueryResult response = new QueryResult( - schema, - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)))); + schema, ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)))); SimpleJsonResponseFormatter formatter = new SimpleJsonResponseFormatter(COMPACT); assertEquals( "{\"schema\":[{\"name\":\"name\",\"type\":\"string\"}]," @@ -120,10 +121,13 @@ void formatResponseWithTupleValue() { new QueryResult( schema, Arrays.asList( - tupleValue(ImmutableMap - .of("name", "Smith", - "address", ImmutableMap.of("state", "WA", "street", - ImmutableMap.of("city", "seattle")))))); + tupleValue( + ImmutableMap.of( + "name", + "Smith", + "address", + ImmutableMap.of( + "state", "WA", "street", ImmutableMap.of("city", "seattle")))))); SimpleJsonResponseFormatter formatter = new SimpleJsonResponseFormatter(COMPACT); assertEquals( @@ -140,11 +144,13 @@ void formatResponseWithArrayValue() { new QueryResult( schema, Arrays.asList( - tupleValue(ImmutableMap - .of("name", "Smith", - "address", Arrays.asList( - ImmutableMap.of("state", "WA"), ImmutableMap.of("state", "NYC") - ))))); + tupleValue( + ImmutableMap.of( + "name", + "Smith", + "address", + Arrays.asList( + ImmutableMap.of("state", "WA"), ImmutableMap.of("state", "NYC")))))); SimpleJsonResponseFormatter formatter = new SimpleJsonResponseFormatter(COMPACT); assertEquals( "{\"schema\":[{\"name\":\"firstname\",\"type\":\"string\"}," diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java index f501a53d64..a6fdd1e03e 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java @@ -24,18 +24,21 @@ import org.opensearch.sql.protocol.response.QueryResult; public class VisualizationResponseFormatterTest { - private final VisualizationResponseFormatter formatter = new VisualizationResponseFormatter( - JsonResponseFormatter.Style.COMPACT); + private final VisualizationResponseFormatter formatter = + new VisualizationResponseFormatter(JsonResponseFormatter.Style.COMPACT); @Test void formatResponse() { - QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))), - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), - tupleValue(ImmutableMap.of("name", "Bob", 
"age", 28)))); + QueryResult response = + new QueryResult( + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), + tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); assertJsonEquals( "{\"data\":{" @@ -55,10 +58,12 @@ void formatResponse() { void formatResponseWithNull() { QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", null, STRING), - new ExecutionEngine.Schema.Column("age", null, INTEGER))), - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", LITERAL_MISSING)), + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", null, STRING), + new ExecutionEngine.Schema.Column("age", null, INTEGER))), + ImmutableList.of( + tupleValue(ImmutableMap.of("name", "John", "age", LITERAL_MISSING)), tupleValue(ImmutableMap.of("name", "Allen", "age", LITERAL_NULL)), tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); @@ -73,8 +78,7 @@ void formatResponseWithNull() { + "\"size\":3," + "\"status\":200" + "}", - formatter.format(response) - ); + formatter.format(response)); } @Test @@ -87,8 +91,7 @@ void clientErrorSyntaxException() { + "\"details\":\"Invalid query syntax\"" + "}," + "\"status\":400}", - formatter.format(new SyntaxCheckException("Invalid query syntax")) - ); + formatter.format(new SyntaxCheckException("Invalid query syntax"))); } @Test @@ -101,8 +104,7 @@ void clientErrorSemanticException() { + "\"details\":\"Invalid query semantics\"" + "}," + "\"status\":400}", - formatter.format(new SemanticCheckException("Invalid query semantics")) - ); + formatter.format(new SemanticCheckException("Invalid query semantics"))); } @Test @@ -115,8 +117,7 @@ void serverError() { + "\"details\":\"Execution error\"" + "}," + "\"status\":503}", - formatter.format(new IllegalStateException("Execution error")) - ); + formatter.format(new IllegalStateException("Execution error"))); } @Test @@ -133,22 +134,25 @@ void opensearchServerError() { + "from OpenSearch engine.\"" + "}," + "\"status\":503}", - formatter.format(new OpenSearchException("all shards failed", - new IllegalStateException("Execution error"))) - ); + formatter.format( + new OpenSearchException( + "all shards failed", new IllegalStateException("Execution error")))); } @Test void prettyStyle() { - VisualizationResponseFormatter prettyFormatter = new VisualizationResponseFormatter( - JsonResponseFormatter.Style.PRETTY); - QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))), - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), - tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); + VisualizationResponseFormatter prettyFormatter = + new VisualizationResponseFormatter(JsonResponseFormatter.Style.PRETTY); + QueryResult response = + new QueryResult( + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + 
tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), + tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); assertJsonEquals( "{\n" @@ -179,14 +183,11 @@ void prettyStyle() { + " \"size\": 3,\n" + " \"status\": 200\n" + "}", - prettyFormatter.format(response) - ); + prettyFormatter.format(response)); } private static void assertJsonEquals(String expected, String actual) { - assertEquals( - JsonParser.parseString(expected), - JsonParser.parseString(actual)); + assertEquals(JsonParser.parseString(expected), JsonParser.parseString(actual)); } @Test diff --git a/spark/build.gradle b/spark/build.gradle index 89842e5ea8..2608b88ced 100644 --- a/spark/build.gradle +++ b/spark/build.gradle @@ -13,6 +13,9 @@ repositories { mavenCentral() } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + dependencies { api project(':core') implementation project(':datasources') diff --git a/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java b/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java index 1e2475c196..1a3304994b 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java +++ b/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java @@ -36,12 +36,16 @@ public class EmrClientImpl implements SparkClient { /** * Constructor for EMR Client Implementation. * - * @param emr EMR helper - * @param flint Opensearch args for flint integration jar + * @param emr EMR helper + * @param flint Opensearch args for flint integration jar * @param sparkResponse Response object to help with retrieving results from Opensearch index */ - public EmrClientImpl(AmazonElasticMapReduce emr, String emrCluster, FlintHelper flint, - SparkResponse sparkResponse, String sparkApplicationJar) { + public EmrClientImpl( + AmazonElasticMapReduce emr, + String emrCluster, + FlintHelper flint, + SparkResponse sparkResponse, + String sparkApplicationJar) { this.emr = emr; this.emrCluster = emrCluster; this.flint = flint; @@ -59,38 +63,39 @@ public JSONObject sql(String query) throws IOException { @VisibleForTesting void runEmrApplication(String query) { - HadoopJarStepConfig stepConfig = new HadoopJarStepConfig() - .withJar("command-runner.jar") - .withArgs("spark-submit", - "--class","org.opensearch.sql.SQLJob", - "--jars", - flint.getFlintIntegrationJar(), - sparkApplicationJar, - query, - SPARK_INDEX_NAME, - flint.getFlintHost(), - flint.getFlintPort(), - flint.getFlintScheme(), - flint.getFlintAuth(), - flint.getFlintRegion() - ); + HadoopJarStepConfig stepConfig = + new HadoopJarStepConfig() + .withJar("command-runner.jar") + .withArgs( + "spark-submit", + "--class", + "org.opensearch.sql.SQLJob", + "--jars", + flint.getFlintIntegrationJar(), + sparkApplicationJar, + query, + SPARK_INDEX_NAME, + flint.getFlintHost(), + flint.getFlintPort(), + flint.getFlintScheme(), + flint.getFlintAuth(), + flint.getFlintRegion()); - StepConfig emrstep = new StepConfig() - .withName("Spark Application") - .withActionOnFailure(ActionOnFailure.CONTINUE) - .withHadoopJarStep(stepConfig); + StepConfig emrstep = + new StepConfig() + .withName("Spark Application") + .withActionOnFailure(ActionOnFailure.CONTINUE) + .withHadoopJarStep(stepConfig); - AddJobFlowStepsRequest request = new AddJobFlowStepsRequest() - .withJobFlowId(emrCluster) - .withSteps(emrstep); + AddJobFlowStepsRequest request = + new AddJobFlowStepsRequest().withJobFlowId(emrCluster).withSteps(emrstep); AddJobFlowStepsResult result = emr.addJobFlowSteps(request); logger.info("EMR step 
ID: " + result.getStepIds()); String stepId = result.getStepIds().get(0); - DescribeStepRequest stepRequest = new DescribeStepRequest() - .withClusterId(emrCluster) - .withStepId(stepId); + DescribeStepRequest stepRequest = + new DescribeStepRequest().withClusterId(emrCluster).withStepId(stepId); waitForStepExecution(stepRequest); sparkResponse.setValue(stepId); @@ -117,5 +122,4 @@ private void waitForStepExecution(DescribeStepRequest stepRequest) { } } } - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java b/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java index 99d8600dd0..b38f04680b 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java +++ b/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java @@ -8,15 +8,13 @@ import java.io.IOException; import org.json.JSONObject; -/** - * Interface class for Spark Client. - */ +/** Interface class for Spark Client. */ public interface SparkClient { /** * This method executes spark sql query. * * @param query spark sql query - * @return spark query response + * @return spark query response */ JSONObject sql(String query) throws IOException; } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java b/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java index 1936c266de..914aa80085 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java @@ -24,9 +24,7 @@ import org.opensearch.sql.spark.storage.SparkTable; import org.opensearch.sql.storage.Table; -/** - * Spark SQL function implementation. - */ +/** Spark SQL function implementation. */ public class SparkSqlFunctionImplementation extends FunctionExpression implements TableFunctionImplementation { @@ -38,8 +36,8 @@ public class SparkSqlFunctionImplementation extends FunctionExpression * Constructor for spark sql function. 
* * @param functionName name of the function - * @param arguments a list of expressions - * @param sparkClient spark client + * @param arguments a list of expressions + * @param sparkClient spark client */ public SparkSqlFunctionImplementation( FunctionName functionName, List arguments, SparkClient sparkClient) { @@ -51,9 +49,11 @@ public SparkSqlFunctionImplementation( @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException(String.format( - "Spark defined function [%s] is only " - + "supported in SOURCE clause with spark connector catalog", functionName)); + throw new UnsupportedOperationException( + String.format( + "Spark defined function [%s] is only " + + "supported in SOURCE clause with spark connector catalog", + functionName)); } @Override @@ -63,11 +63,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", - ((NamedArgumentExpression) arg).getArgName(), - ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } @@ -80,23 +84,23 @@ public Table applyArguments() { * This method builds a spark query request. * * @param arguments spark sql function arguments - * @return spark query request + * @return spark query request */ private SparkQueryRequest buildQueryFromSqlFunction(List arguments) { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); - arguments.forEach(arg -> { - String argName = ((NamedArgumentExpression) arg).getArgName(); - Expression argValue = ((NamedArgumentExpression) arg).getValue(); - ExprValue literalValue = argValue.valueOf(); - if (argName.equals(QUERY)) { - sparkQueryRequest.setSql((String) literalValue.value()); - } else { - throw new ExpressionEvaluationException( - String.format("Invalid Function Argument:%s", argName)); - } - }); + arguments.forEach( + arg -> { + String argName = ((NamedArgumentExpression) arg).getArgName(); + Expression argValue = ((NamedArgumentExpression) arg).getValue(); + ExprValue literalValue = argValue.valueOf(); + if (argName.equals(QUERY)) { + sparkQueryRequest.setSql((String) literalValue.value()); + } else { + throw new ExpressionEvaluationException( + String.format("Invalid Function Argument:%s", argName)); + } + }); return sparkQueryRequest; } - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java b/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java index 624600e1a8..a4f2a6c0fe 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java @@ -22,9 +22,7 @@ import org.opensearch.sql.spark.client.SparkClient; import org.opensearch.sql.spark.functions.implementation.SparkSqlFunctionImplementation; -/** - * Function resolver for sql function of spark connector. - */ +/** Function resolver for sql function of spark connector. 
*/ @RequiredArgsConstructor public class SparkSqlTableFunctionResolver implements FunctionResolver { private final SparkClient sparkClient; @@ -35,35 +33,44 @@ public class SparkSqlTableFunctionResolver implements FunctionResolver { @Override public Pair resolve(FunctionSignature unresolvedSignature) { FunctionName functionName = FunctionName.of(SQL); - FunctionSignature functionSignature = - new FunctionSignature(functionName, List.of(STRING)); + FunctionSignature functionSignature = new FunctionSignature(functionName, List.of(STRING)); final List argumentNames = List.of(QUERY); - FunctionBuilder functionBuilder = (functionProperties, arguments) -> { - Boolean argumentsPassedByName = arguments.stream() - .noneMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); - Boolean argumentsPassedByPosition = arguments.stream() - .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); - if (!(argumentsPassedByName || argumentsPassedByPosition)) { - throw new SemanticCheckException("Arguments should be either passed by name or position"); - } + FunctionBuilder functionBuilder = + (functionProperties, arguments) -> { + Boolean argumentsPassedByName = + arguments.stream() + .noneMatch( + arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + Boolean argumentsPassedByPosition = + arguments.stream() + .allMatch( + arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + if (!(argumentsPassedByName || argumentsPassedByPosition)) { + throw new SemanticCheckException( + "Arguments should be either passed by name or position"); + } - if (arguments.size() != argumentNames.size()) { - throw new SemanticCheckException( - String.format("Missing arguments:[%s]", - String.join(",", argumentNames.subList(arguments.size(), argumentNames.size())))); - } + if (arguments.size() != argumentNames.size()) { + throw new SemanticCheckException( + String.format( + "Missing arguments:[%s]", + String.join( + ",", argumentNames.subList(arguments.size(), argumentNames.size())))); + } - if (argumentsPassedByPosition) { - List namedArguments = new ArrayList<>(); - for (int i = 0; i < arguments.size(); i++) { - namedArguments.add(new NamedArgumentExpression(argumentNames.get(i), - ((NamedArgumentExpression) arguments.get(i)).getValue())); - } - return new SparkSqlFunctionImplementation(functionName, namedArguments, sparkClient); - } - return new SparkSqlFunctionImplementation(functionName, arguments, sparkClient); - }; + if (argumentsPassedByPosition) { + List namedArguments = new ArrayList<>(); + for (int i = 0; i < arguments.size(); i++) { + namedArguments.add( + new NamedArgumentExpression( + argumentNames.get(i), + ((NamedArgumentExpression) arguments.get(i)).getValue())); + } + return new SparkSqlFunctionImplementation(functionName, namedArguments, sparkClient); + } + return new SparkSqlFunctionImplementation(functionName, arguments, sparkClient); + }; return Pair.of(functionSignature, functionBuilder); } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java b/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java index cb2b31ddc1..823ad2da29 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java @@ -29,9 +29,7 @@ import 
org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Default implementation of SparkSqlFunctionResponseHandle. - */ +/** Default implementation of SparkSqlFunctionResponseHandle. */ public class DefaultSparkSqlFunctionResponseHandle implements SparkSqlFunctionResponseHandle { private Iterator responseIterator; private ExecutionEngine.Schema schema; @@ -54,8 +52,8 @@ private void constructIteratorAndSchema(JSONObject responseObject) { logger.info("Spark Application ID: " + items.getString("applicationId")); columnList = getColumnList(items.getJSONArray("schema")); for (int i = 0; i < items.getJSONArray("result").length(); i++) { - JSONObject row = new JSONObject( - items.getJSONArray("result").get(i).toString().replace("'", "\"")); + JSONObject row = + new JSONObject(items.getJSONArray("result").get(i).toString().replace("'", "\"")); LinkedHashMap linkedHashMap = extractRow(row, columnList); result.add(new ExprTupleValue(linkedHashMap)); } @@ -85,8 +83,8 @@ private static LinkedHashMap extractRow( } else if (type == ExprCoreType.DATE) { linkedHashMap.put(column.getName(), new ExprDateValue(row.getString(column.getName()))); } else if (type == ExprCoreType.TIMESTAMP) { - linkedHashMap.put(column.getName(), - new ExprTimestampValue(row.getString(column.getName()))); + linkedHashMap.put( + column.getName(), new ExprTimestampValue(row.getString(column.getName()))); } else if (type == ExprCoreType.STRING) { linkedHashMap.put(column.getName(), new ExprStringValue(row.getString(column.getName()))); } else { @@ -101,10 +99,11 @@ private List getColumnList(JSONArray schema) { List columnList = new ArrayList<>(); for (int i = 0; i < schema.length(); i++) { JSONObject column = new JSONObject(schema.get(i).toString().replace("'", "\"")); - columnList.add(new ExecutionEngine.Schema.Column( - column.get("column_name").toString(), - column.get("column_name").toString(), - getDataType(column.get("data_type").toString()))); + columnList.add( + new ExecutionEngine.Schema.Column( + column.get("column_name").toString(), + column.get("column_name").toString(), + getDataType(column.get("data_type").toString()))); } return columnList; } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java b/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java index da68b591eb..a9be484712 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java @@ -8,24 +8,18 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Handle Spark response. - */ +/** Handle Spark response. */ public interface SparkSqlFunctionResponseHandle { - /** - * Return true if Spark response has more result. - */ + /** Return true if Spark response has more result. */ boolean hasNext(); /** - * Return Spark response as {@link ExprValue}. Attention, the method must been called when - * hasNext return true. + * Return Spark response as {@link ExprValue}. Attention, the method must been called when hasNext + * return true. */ ExprValue next(); - /** - * Return ExecutionEngine.Schema of the Spark response. - */ + /** Return ExecutionEngine.Schema of the Spark response. 
*/ ExecutionEngine.Schema schema(); } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java index 28ce7dd19a..aea8f72f36 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java @@ -12,9 +12,7 @@ import org.opensearch.sql.storage.TableScanOperator; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * TableScanBuilder for sql function of spark connector. - */ +/** TableScanBuilder for sql function of spark connector. */ @AllArgsConstructor public class SparkSqlFunctionTableScanBuilder extends TableScanBuilder { diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java index 85e854e422..a2e44affd5 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java @@ -21,9 +21,7 @@ import org.opensearch.sql.spark.request.SparkQueryRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * This a table scan operator to handle sql table function. - */ +/** This a table scan operator to handle sql table function. */ @RequiredArgsConstructor public class SparkSqlFunctionTableScanOperator extends TableScanOperator { private final SparkClient sparkClient; @@ -34,17 +32,19 @@ public class SparkSqlFunctionTableScanOperator extends TableScanOperator { @Override public void open() { super.open(); - this.sparkResponseHandle = AccessController.doPrivileged( - (PrivilegedAction) () -> { - try { - JSONObject responseObject = sparkClient.sql(request.getSql()); - return new DefaultSparkSqlFunctionResponseHandle(responseObject); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException( - String.format("Error fetching data from spark server: %s", e.getMessage())); - } - }); + this.sparkResponseHandle = + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + JSONObject responseObject = sparkClient.sql(request.getSql()); + return new DefaultSparkSqlFunctionResponseHandle(responseObject); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + String.format("Error fetching data from spark server: %s", e.getMessage())); + } + }); } @Override diff --git a/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java b/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java index b3c3c0871a..10d880187f 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java +++ b/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java @@ -15,25 +15,20 @@ import lombok.Getter; public class FlintHelper { - @Getter - private final String flintIntegrationJar; - @Getter - private final String flintHost; - @Getter - private final String flintPort; - @Getter - private final String flintScheme; - @Getter - private final String flintAuth; - @Getter - private final String flintRegion; + @Getter private final String flintIntegrationJar; + @Getter private final String flintHost; + @Getter private final String flintPort; + @Getter private final String flintScheme; + @Getter 
private final String flintAuth; + @Getter private final String flintRegion; - /** Arguments required to write data to opensearch index using flint integration. + /** + * Arguments required to write data to opensearch index using flint integration. * - * @param flintHost Opensearch host for flint - * @param flintPort Opensearch port for flint integration + * @param flintHost Opensearch host for flint + * @param flintPort Opensearch port for flint integration * @param flintScheme Opensearch scheme for flint integration - * @param flintAuth Opensearch auth for flint integration + * @param flintAuth Opensearch auth for flint integration * @param flintRegion Opensearch region for flint integration */ public FlintHelper( diff --git a/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java b/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java index bc0944a784..94c9795161 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java +++ b/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java @@ -7,15 +7,10 @@ import lombok.Data; -/** - * Spark query request. - */ +/** Spark query request. */ @Data public class SparkQueryRequest { - /** - * SQL. - */ + /** SQL. */ private String sql; - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java b/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java index f30072eb3f..3edb541384 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java +++ b/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java @@ -36,8 +36,8 @@ public class SparkResponse { * Response for spark sql query. * * @param client Opensearch client - * @param value Identifier field value - * @param field Identifier field name + * @param value Identifier field value + * @param field Identifier field name */ public SparkResponse(Client client, String value, String field) { this.client = client; @@ -64,8 +64,10 @@ private JSONObject searchInSparkIndex(QueryBuilder query) { SearchResponse searchResponse = searchResponseActionFuture.actionGet(); if (searchResponse.status().getStatus() != 200) { throw new RuntimeException( - "Fetching result from " + SPARK_INDEX_NAME + " index failed with status : " - + searchResponse.status()); + "Fetching result from " + + SPARK_INDEX_NAME + + " index failed with status : " + + searchResponse.status()); } else { JSONObject data = new JSONObject(); for (SearchHit searchHit : searchResponse.getHits().getHits()) { @@ -90,11 +92,11 @@ void deleteInSparkIndex(String id) { if (deleteResponse.getResult().equals(DocWriteResponse.Result.DELETED)) { LOG.debug("Spark result successfully deleted ", id); } else if (deleteResponse.getResult().equals(DocWriteResponse.Result.NOT_FOUND)) { - throw new ResourceNotFoundException("Spark result with id " - + id + " doesn't exist"); + throw new ResourceNotFoundException("Spark result with id " + id + " doesn't exist"); } else { - throw new RuntimeException("Deleting spark result information failed with : " - + deleteResponse.getResult().getLowercase()); + throw new RuntimeException( + "Deleting spark result information failed with : " + + deleteResponse.getResult().getLowercase()); } } } diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java index 3897e8690e..395e1685a6 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java +++ 
b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java @@ -14,21 +14,14 @@ import org.opensearch.sql.spark.request.SparkQueryRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * Spark scan operator. - */ +/** Spark scan operator. */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class SparkScan extends TableScanOperator { private final SparkClient sparkClient; - @EqualsAndHashCode.Include - @Getter - @Setter - @ToString.Include - private SparkQueryRequest request; - + @EqualsAndHashCode.Include @Getter @Setter @ToString.Include private SparkQueryRequest request; /** * Constructor. @@ -54,5 +47,4 @@ public ExprValue next() { public String explain() { return getRequest().toString(); } - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java index a5e35ecc4c..84c9c05e79 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java @@ -15,17 +15,14 @@ import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; -/** - * Spark storage engine implementation. - */ +/** Spark storage engine implementation. */ @RequiredArgsConstructor public class SparkStorageEngine implements StorageEngine { private final SparkClient sparkClient; @Override public Collection getFunctions() { - return Collections.singletonList( - new SparkSqlTableFunctionResolver(sparkClient)); + return Collections.singletonList(new SparkSqlTableFunctionResolver(sparkClient)); } @Override diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java index 937679b50e..467bacbaea 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java @@ -30,9 +30,7 @@ import org.opensearch.sql.storage.DataSourceFactory; import org.opensearch.sql.storage.StorageEngine; -/** - * Storage factory implementation for spark connector. - */ +/** Storage factory implementation for spark connector. 
*/ @RequiredArgsConstructor public class SparkStorageFactory implements DataSourceFactory { private final Client client; @@ -66,9 +64,7 @@ public DataSourceType getDataSourceType() { @Override public DataSource createDataSource(DataSourceMetadata metadata) { return new DataSource( - metadata.getName(), - DataSourceType.SPARK, - getStorageEngine(metadata.getProperties())); + metadata.getName(), DataSourceType.SPARK, getStorageEngine(metadata.getProperties())); } /** @@ -81,24 +77,26 @@ StorageEngine getStorageEngine(Map requiredConfig) { SparkClient sparkClient; if (requiredConfig.get(CONNECTOR_TYPE).equals(EMR)) { sparkClient = - AccessController.doPrivileged((PrivilegedAction) () -> { - validateEMRConfigProperties(requiredConfig); - return new EmrClientImpl( - getEMRClient( - requiredConfig.get(EMR_ACCESS_KEY), - requiredConfig.get(EMR_SECRET_KEY), - requiredConfig.get(EMR_REGION)), - requiredConfig.get(EMR_CLUSTER), - new FlintHelper( - requiredConfig.get(FLINT_INTEGRATION), - requiredConfig.get(FLINT_HOST), - requiredConfig.get(FLINT_PORT), - requiredConfig.get(FLINT_SCHEME), - requiredConfig.get(FLINT_AUTH), - requiredConfig.get(FLINT_REGION)), - new SparkResponse(client, null, STEP_ID_FIELD), - requiredConfig.get(SPARK_SQL_APPLICATION)); - }); + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + validateEMRConfigProperties(requiredConfig); + return new EmrClientImpl( + getEMRClient( + requiredConfig.get(EMR_ACCESS_KEY), + requiredConfig.get(EMR_SECRET_KEY), + requiredConfig.get(EMR_REGION)), + requiredConfig.get(EMR_CLUSTER), + new FlintHelper( + requiredConfig.get(FLINT_INTEGRATION), + requiredConfig.get(FLINT_HOST), + requiredConfig.get(FLINT_PORT), + requiredConfig.get(FLINT_SCHEME), + requiredConfig.get(FLINT_AUTH), + requiredConfig.get(FLINT_REGION)), + new SparkResponse(client, null, STEP_ID_FIELD), + requiredConfig.get(SPARK_SQL_APPLICATION)); + }); } else { throw new InvalidParameterException("Spark connector type is invalid."); } @@ -110,12 +108,14 @@ private void validateEMRConfigProperties(Map dataSourceMetadataC if (dataSourceMetadataConfig.get(EMR_CLUSTER) == null || dataSourceMetadataConfig.get(EMR_AUTH_TYPE) == null) { throw new IllegalArgumentException("EMR config properties are missing."); - } else if (dataSourceMetadataConfig.get(EMR_AUTH_TYPE) - .equals(AuthenticationType.AWSSIGV4AUTH.getName()) + } else if (dataSourceMetadataConfig + .get(EMR_AUTH_TYPE) + .equals(AuthenticationType.AWSSIGV4AUTH.getName()) && (dataSourceMetadataConfig.get(EMR_ACCESS_KEY) == null - || dataSourceMetadataConfig.get(EMR_SECRET_KEY) == null)) { + || dataSourceMetadataConfig.get(EMR_SECRET_KEY) == null)) { throw new IllegalArgumentException("EMR auth keys are missing."); - } else if (!dataSourceMetadataConfig.get(EMR_AUTH_TYPE) + } else if (!dataSourceMetadataConfig + .get(EMR_AUTH_TYPE) .equals(AuthenticationType.AWSSIGV4AUTH.getName())) { throw new IllegalArgumentException("Invalid auth type."); } @@ -124,8 +124,8 @@ private void validateEMRConfigProperties(Map dataSourceMetadataC private AmazonElasticMapReduce getEMRClient( String emrAccessKey, String emrSecretKey, String emrRegion) { return AmazonElasticMapReduceClientBuilder.standard() - .withCredentials(new AWSStaticCredentialsProvider( - new BasicAWSCredentials(emrAccessKey, emrSecretKey))) + .withCredentials( + new AWSStaticCredentialsProvider(new BasicAWSCredentials(emrAccessKey, emrSecretKey))) .withRegion(emrRegion) .build(); } diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java 
b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java index 5151405db9..731c3df672 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java @@ -18,20 +18,14 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * Spark table implementation. - * This can be constructed from SparkQueryRequest. - */ +/** Spark table implementation. This can be constructed from SparkQueryRequest. */ public class SparkTable implements Table { private final SparkClient sparkClient; - @Getter - private final SparkQueryRequest sparkQueryRequest; + @Getter private final SparkQueryRequest sparkQueryRequest; - /** - * Constructor for entire Sql Request. - */ + /** Constructor for entire Sql Request. */ public SparkTable(SparkClient sparkService, SparkQueryRequest sparkQueryRequest) { this.sparkClient = sparkService; this.sparkQueryRequest = sparkQueryRequest; @@ -56,8 +50,7 @@ public Map getFieldTypes() { @Override public PhysicalPlan implement(LogicalPlan plan) { - SparkScan metricScan = - new SparkScan(sparkClient); + SparkScan metricScan = new SparkScan(sparkClient); metricScan.setRequest(sparkQueryRequest); return plan.accept(new DefaultImplementor(), metricScan); } diff --git a/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java b/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java index a94ac01f2f..93dc0d6bc8 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java @@ -29,12 +29,9 @@ @ExtendWith(MockitoExtension.class) public class EmrClientImplTest { - @Mock - private AmazonElasticMapReduce emr; - @Mock - private FlintHelper flint; - @Mock - private SparkResponse sparkResponse; + @Mock private AmazonElasticMapReduce emr; + @Mock private FlintHelper flint; + @Mock private SparkResponse sparkResponse; @Test @SneakyThrows @@ -50,8 +47,8 @@ void testRunEmrApplication() { describeStepResult.setStep(step); when(emr.describeStep(any())).thenReturn(describeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); emrClientImpl.runEmrApplication(QUERY); } @@ -69,12 +66,12 @@ void testRunEmrApplicationFailed() { describeStepResult.setStep(step); when(emr.describeStep(any())).thenReturn(describeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> emrClientImpl.runEmrApplication(QUERY)); - Assertions.assertEquals("Spark SQL application failed.", - exception.getMessage()); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> emrClientImpl.runEmrApplication(QUERY)); + Assertions.assertEquals("Spark SQL application failed.", exception.getMessage()); } @Test @@ -91,12 +88,12 @@ void testRunEmrApplicationCancelled() { describeStepResult.setStep(step); when(emr.describeStep(any())).thenReturn(describeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); - RuntimeException exception = 
Assertions.assertThrows(RuntimeException.class, - () -> emrClientImpl.runEmrApplication(QUERY)); - Assertions.assertEquals("Spark SQL application failed.", - exception.getMessage()); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> emrClientImpl.runEmrApplication(QUERY)); + Assertions.assertEquals("Spark SQL application failed.", exception.getMessage()); } @Test @@ -119,11 +116,12 @@ void testRunEmrApplicationRunnning() { DescribeStepResult completedDescribeStepResult = new DescribeStepResult(); completedDescribeStepResult.setStep(completedStep); - when(emr.describeStep(any())).thenReturn(runningDescribeStepResult) + when(emr.describeStep(any())) + .thenReturn(runningDescribeStepResult) .thenReturn(completedDescribeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); emrClientImpl.runEmrApplication(QUERY); } @@ -147,14 +145,14 @@ void testSql() { DescribeStepResult completedDescribeStepResult = new DescribeStepResult(); completedDescribeStepResult.setStep(completedStep); - when(emr.describeStep(any())).thenReturn(runningDescribeStepResult) + when(emr.describeStep(any())) + .thenReturn(runningDescribeStepResult) .thenReturn(completedDescribeStepResult); when(sparkResponse.getResultFromOpensearchIndex()) .thenReturn(new JSONObject(getJson("select_query_response.json"))); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); emrClientImpl.sql(QUERY); - } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java index 18db5b9471..120747e0d3 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java @@ -27,51 +27,52 @@ @ExtendWith(MockitoExtension.class) public class SparkSqlFunctionImplementationTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; @Test void testValueOfAndTypeToString() { FunctionName functionName = new FunctionName("sql"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); - SparkSqlFunctionImplementation sparkSqlFunctionImplementation - = new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); - UnsupportedOperationException exception = assertThrows(UnsupportedOperationException.class, - () -> sparkSqlFunctionImplementation.valueOf()); - assertEquals("Spark defined function [sql] is only " - + "supported in SOURCE clause with spark connector catalog", exception.getMessage()); - assertEquals("sql(query=\"select 1\")", - sparkSqlFunctionImplementation.toString()); + List namedArgumentExpressionList = + List.of(DSL.namedArgument("query", DSL.literal(QUERY))); + SparkSqlFunctionImplementation sparkSqlFunctionImplementation = + new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); + UnsupportedOperationException exception = + assertThrows( + UnsupportedOperationException.class, () -> 
sparkSqlFunctionImplementation.valueOf()); + assertEquals( + "Spark defined function [sql] is only " + + "supported in SOURCE clause with spark connector catalog", + exception.getMessage()); + assertEquals("sql(query=\"select 1\")", sparkSqlFunctionImplementation.toString()); assertEquals(ExprCoreType.STRUCT, sparkSqlFunctionImplementation.type()); } @Test void testApplyArguments() { FunctionName functionName = new FunctionName("sql"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); - SparkSqlFunctionImplementation sparkSqlFunctionImplementation - = new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); - SparkTable sparkTable - = (SparkTable) sparkSqlFunctionImplementation.applyArguments(); + List namedArgumentExpressionList = + List.of(DSL.namedArgument("query", DSL.literal(QUERY))); + SparkSqlFunctionImplementation sparkSqlFunctionImplementation = + new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); + SparkTable sparkTable = (SparkTable) sparkSqlFunctionImplementation.applyArguments(); assertNotNull(sparkTable.getSparkQueryRequest()); - SparkQueryRequest sparkQueryRequest - = sparkTable.getSparkQueryRequest(); + SparkQueryRequest sparkQueryRequest = sparkTable.getSparkQueryRequest(); assertEquals(QUERY, sparkQueryRequest.getSql()); } @Test void testApplyArgumentsException() { FunctionName functionName = new FunctionName("sql"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal(QUERY)), - DSL.namedArgument("tmp", DSL.literal(12345))); - SparkSqlFunctionImplementation sparkSqlFunctionImplementation - = new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> sparkSqlFunctionImplementation.applyArguments()); + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal(QUERY)), + DSL.namedArgument("tmp", DSL.literal(12345))); + SparkSqlFunctionImplementation sparkSqlFunctionImplementation = + new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> sparkSqlFunctionImplementation.applyArguments()); assertEquals("Invalid Function Argument:tmp", exception.getMessage()); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java index 94c87602b7..212056eb15 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java @@ -18,23 +18,20 @@ import org.opensearch.sql.storage.TableScanOperator; public class SparkSqlFunctionTableScanBuilderTest { - @Mock - private SparkClient sparkClient; + @Mock private SparkClient sparkClient; - @Mock - private LogicalProject logicalProject; + @Mock private LogicalProject logicalProject; @Test void testBuild() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder - = new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); - TableScanOperator sqlFunctionTableScanOperator - = 
sparkSqlFunctionTableScanBuilder.build(); - Assertions.assertTrue(sqlFunctionTableScanOperator - instanceof SparkSqlFunctionTableScanOperator); + SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder = + new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); + TableScanOperator sqlFunctionTableScanOperator = sparkSqlFunctionTableScanBuilder.build(); + Assertions.assertTrue( + sqlFunctionTableScanOperator instanceof SparkSqlFunctionTableScanOperator); } @Test @@ -42,8 +39,8 @@ void testPushProject() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder - = new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder = + new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); Assertions.assertTrue(sparkSqlFunctionTableScanBuilder.pushDownProject(logicalProject)); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java index f6807f9913..586f0ef2d8 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java @@ -43,8 +43,7 @@ @ExtendWith(MockitoExtension.class) public class SparkSqlFunctionTableScanOperatorTest { - @Mock - private SparkClient sparkClient; + @Mock private SparkClient sparkClient; @Test @SneakyThrows @@ -52,15 +51,14 @@ void testEmptyQueryWithException() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, sparkSqlFunctionTableScanOperator::open); - assertEquals("Error fetching data from spark server: Error Message", - runtimeException.getMessage()); + when(sparkClient.sql(any())).thenThrow(new IOException("Error Message")); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, sparkSqlFunctionTableScanOperator::open); + assertEquals( + "Error fetching data from spark server: Error Message", runtimeException.getMessage()); } @Test @@ -69,8 +67,8 @@ void testClose() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); sparkSqlFunctionTableScanOperator.close(); } @@ -80,11 +78,10 @@ void testExplain() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new 
SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - Assertions.assertEquals("sql(select 1)", - sparkSqlFunctionTableScanOperator.explain()); + Assertions.assertEquals("sql(select 1)", sparkSqlFunctionTableScanOperator.explain()); } @Test @@ -93,18 +90,19 @@ void testQueryResponseIterator() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn(new JSONObject(getJson("select_query_response.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("select_query_response.json"))); sparkSqlFunctionTableScanOperator.open(); assertTrue(sparkSqlFunctionTableScanOperator.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() { - { - put("1", new ExprIntegerValue(1)); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("1", new ExprIntegerValue(1)); + } + }); assertEquals(firstRow, sparkSqlFunctionTableScanOperator.next()); Assertions.assertFalse(sparkSqlFunctionTableScanOperator.hasNext()); } @@ -115,28 +113,29 @@ void testQueryResponseAllTypes() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn(new JSONObject(getJson("all_data_type.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("all_data_type.json"))); sparkSqlFunctionTableScanOperator.open(); assertTrue(sparkSqlFunctionTableScanOperator.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() { - { - put("boolean", ExprBooleanValue.of(true)); - put("long", new ExprLongValue(922337203)); - put("integer", new ExprIntegerValue(2147483647)); - put("short", new ExprShortValue(32767)); - put("byte", new ExprByteValue(127)); - put("double", new ExprDoubleValue(9223372036854.775807)); - put("float", new ExprFloatValue(21474.83647)); - put("timestamp", new ExprDateValue("2023-07-01 10:31:30")); - put("date", new ExprTimestampValue("2023-07-01 10:31:30")); - put("string", new ExprStringValue("ABC")); - put("char", new ExprStringValue("A")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("boolean", ExprBooleanValue.of(true)); + put("long", new ExprLongValue(922337203)); + put("integer", new ExprIntegerValue(2147483647)); + put("short", new ExprShortValue(32767)); + put("byte", new ExprByteValue(127)); + put("double", new ExprDoubleValue(9223372036854.775807)); + put("float", new ExprFloatValue(21474.83647)); + put("timestamp", new ExprDateValue("2023-07-01 10:31:30")); + put("date", new ExprTimestampValue("2023-07-01 10:31:30")); + put("string", new ExprStringValue("ABC")); + put("char", new ExprStringValue("A")); + } + }); assertEquals(firstRow, sparkSqlFunctionTableScanOperator.next()); Assertions.assertFalse(sparkSqlFunctionTableScanOperator.hasNext()); } @@ -147,16 +146,15 @@ void 
testQueryResponseInvalidDataType() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn(new JSONObject(getJson("invalid_data_type.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("invalid_data_type.json"))); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> sparkSqlFunctionTableScanOperator.open()); - Assertions.assertEquals("Result contains invalid data type", - exception.getMessage()); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> sparkSqlFunctionTableScanOperator.open()); + Assertions.assertEquals("Result contains invalid data type", exception.getMessage()); } @Test @@ -165,17 +163,14 @@ void testQuerySchema() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn( - new JSONObject(getJson("select_query_response.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("select_query_response.json"))); sparkSqlFunctionTableScanOperator.open(); ArrayList columns = new ArrayList<>(); columns.add(new ExecutionEngine.Schema.Column("1", "1", ExprCoreType.INTEGER)); ExecutionEngine.Schema expectedSchema = new ExecutionEngine.Schema(columns); assertEquals(expectedSchema, sparkSqlFunctionTableScanOperator.schema()); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java index e18fac36de..a828ac76c4 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java @@ -35,107 +35,106 @@ @ExtendWith(MockitoExtension.class) public class SparkSqlTableFunctionResolverTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; - @Mock - private FunctionProperties functionProperties; + @Mock private FunctionProperties functionProperties; @Test void testResolve() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + List expressions = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, 
expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof SparkSqlFunctionImplementation); - SparkTable sparkTable - = (SparkTable) functionImplementation.applyArguments(); + SparkTable sparkTable = (SparkTable) functionImplementation.applyArguments(); assertNotNull(sparkTable.getSparkQueryRequest()); - SparkQueryRequest sparkQueryRequest = - sparkTable.getSparkQueryRequest(); + SparkQueryRequest sparkQueryRequest = sparkTable.getSparkQueryRequest(); assertEquals(QUERY, sparkQueryRequest.getSql()); } @Test void testArgumentsPassedByPosition() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List expressions - = List.of(DSL.namedArgument(null, DSL.literal(QUERY))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); + List expressions = List.of(DSL.namedArgument(null, DSL.literal(QUERY))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof SparkSqlFunctionImplementation); - SparkTable sparkTable - = (SparkTable) functionImplementation.applyArguments(); + SparkTable sparkTable = (SparkTable) functionImplementation.applyArguments(); assertNotNull(sparkTable.getSparkQueryRequest()); - SparkQueryRequest sparkQueryRequest = - sparkTable.getSparkQueryRequest(); + SparkQueryRequest sparkQueryRequest = sparkTable.getSparkQueryRequest(); assertEquals(QUERY, sparkQueryRequest.getSql()); } @Test void testMixedArgumentTypes() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal(QUERY)), - 
DSL.namedArgument(null, DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal(QUERY)), + DSL.namedArgument(null, DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Arguments should be either passed by name or position", exception.getMessage()); } @Test void testWrongArgumentsSizeWhenPassedByName() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List expressions - = List.of(); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + List expressions = List.of(); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Missing arguments:[query]", exception.getMessage()); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java b/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java index abc4c81626..211561ac72 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java @@ -32,18 +32,12 @@ @ExtendWith(MockitoExtension.class) public class SparkResponseTest { - @Mock - private Client client; - @Mock - private SearchResponse searchResponse; - @Mock - private DeleteResponse deleteResponse; - @Mock - private SearchHit searchHit; - @Mock - private ActionFuture searchResponseActionFuture; - @Mock - private ActionFuture deleteResponseActionFuture; + @Mock private Client client; + @Mock private SearchResponse searchResponse; + @Mock private DeleteResponse deleteResponse; + 
@Mock private SearchHit searchHit; + @Mock private ActionFuture searchResponseActionFuture; + @Mock private ActionFuture deleteResponseActionFuture; @Test public void testGetResultFromOpensearchIndex() { @@ -53,12 +47,8 @@ public void testGetResultFromOpensearchIndex() { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), - 1.0F)); - Mockito.when(searchHit.getSourceAsMap()) - .thenReturn(Map.of("stepId", EMR_CLUSTER_ID)); - + new SearchHit[] {searchHit}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F)); + Mockito.when(searchHit.getSourceAsMap()).thenReturn(Map.of("stepId", EMR_CLUSTER_ID)); when(client.delete(any())).thenReturn(deleteResponseActionFuture); when(deleteResponseActionFuture.actionGet()).thenReturn(deleteResponse); @@ -75,11 +65,13 @@ public void testInvalidSearchResponse() { when(searchResponse.status()).thenReturn(RestStatus.NO_CONTENT); SparkResponse sparkResponse = new SparkResponse(client, EMR_CLUSTER_ID, "stepId"); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> sparkResponse.getResultFromOpensearchIndex()); + RuntimeException exception = + assertThrows(RuntimeException.class, () -> sparkResponse.getResultFromOpensearchIndex()); Assertions.assertEquals( - "Fetching result from " + SPARK_INDEX_NAME - + " index failed with status : " + RestStatus.NO_CONTENT, + "Fetching result from " + + SPARK_INDEX_NAME + + " index failed with status : " + + RestStatus.NO_CONTENT, exception.getMessage()); } @@ -104,8 +96,9 @@ public void testNotFoundDeleteResponse() { when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOT_FOUND); SparkResponse sparkResponse = new SparkResponse(client, EMR_CLUSTER_ID, "stepId"); - RuntimeException exception = assertThrows(ResourceNotFoundException.class, - () -> sparkResponse.deleteInSparkIndex("123")); + RuntimeException exception = + assertThrows( + ResourceNotFoundException.class, () -> sparkResponse.deleteInSparkIndex("123")); Assertions.assertEquals("Spark result with id 123 doesn't exist", exception.getMessage()); } @@ -116,8 +109,8 @@ public void testInvalidDeleteResponse() { when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOOP); SparkResponse sparkResponse = new SparkResponse(client, EMR_CLUSTER_ID, "stepId"); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> sparkResponse.deleteInSparkIndex("123")); + RuntimeException exception = + assertThrows(RuntimeException.class, () -> sparkResponse.deleteInSparkIndex("123")); Assertions.assertEquals( "Deleting spark result information failed with : noop", exception.getMessage()); } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java index c57142f580..971db3c33c 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java @@ -19,8 +19,7 @@ @ExtendWith(MockitoExtension.class) public class SparkScanTest { - @Mock - private SparkClient sparkClient; + @Mock private SparkClient sparkClient; @Test @SneakyThrows @@ -36,8 +35,6 @@ void testQueryResponseIteratorForQueryRangeFunction() { void testExplain() { SparkScan sparkScan = new SparkScan(sparkClient); sparkScan.getRequest().setSql(QUERY); - assertEquals( - "SparkQueryRequest(sql=select 1)", - sparkScan.explain()); + assertEquals("SparkQueryRequest(sql=select 1)", 
sparkScan.explain()); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java index d42e123678..5e7ec76cdb 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java @@ -22,14 +22,12 @@ @ExtendWith(MockitoExtension.class) public class SparkStorageEngineTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; @Test public void getFunctions() { SparkStorageEngine engine = new SparkStorageEngine(client); - Collection functionResolverCollection - = engine.getFunctions(); + Collection functionResolverCollection = engine.getFunctions(); assertNotNull(functionResolverCollection); assertEquals(1, functionResolverCollection.size()); assertTrue( @@ -39,8 +37,10 @@ public void getFunctions() { @Test public void getTable() { SparkStorageEngine engine = new SparkStorageEngine(client); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> engine.getTable(new DataSourceSchemaName("spark", "default"), "")); + RuntimeException exception = + assertThrows( + RuntimeException.class, + () -> engine.getTable(new DataSourceSchemaName("spark", "default"), "")); assertEquals("Unable to get table from storage engine.", exception.getMessage()); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java index c68adf2039..eb93cdabfe 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java @@ -24,17 +24,14 @@ @ExtendWith(MockitoExtension.class) public class SparkStorageFactoryTest { - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private Client client; + @Mock private Client client; @Test void testGetConnectorType() { SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - Assertions.assertEquals( - DataSourceType.SPARK, sparkStorageFactory.getDataSourceType()); + Assertions.assertEquals(DataSourceType.SPARK, sparkStorageFactory.getDataSourceType()); } @Test @@ -48,8 +45,7 @@ void testGetStorageEngine() { properties.put("emr.auth.secret_key", "secret_key"); properties.put("emr.auth.region", "region"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - StorageEngine storageEngine - = sparkStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = sparkStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine instanceof SparkStorageEngine); } @@ -59,10 +55,11 @@ void testInvalidConnectorType() { HashMap properties = new HashMap<>(); properties.put("spark.connector", "random"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - InvalidParameterException exception = Assertions.assertThrows(InvalidParameterException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Spark connector type is invalid.", - exception.getMessage()); + InvalidParameterException exception = + Assertions.assertThrows( + InvalidParameterException.class, + () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("Spark connector type is invalid.", exception.getMessage()); } 
@Test @@ -72,10 +69,10 @@ void testMissingAuth() { properties.put("spark.connector", "emr"); properties.put("emr.cluster", EMR_CLUSTER_ID); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR config properties are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR config properties are missing.", exception.getMessage()); } @Test @@ -86,10 +83,10 @@ void testUnsupportedEmrAuth() { properties.put("emr.cluster", EMR_CLUSTER_ID); properties.put("emr.auth.type", "basic"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Invalid auth type.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("Invalid auth type.", exception.getMessage()); } @Test @@ -99,10 +96,10 @@ void testMissingCluster() { properties.put("spark.connector", "emr"); properties.put("emr.auth.type", "awssigv4"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR config properties are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR config properties are missing.", exception.getMessage()); } @Test @@ -113,10 +110,10 @@ void testMissingAuthKeys() { properties.put("emr.cluster", EMR_CLUSTER_ID); properties.put("emr.auth.type", "awssigv4"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR auth keys are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR auth keys are missing.", exception.getMessage()); } @Test @@ -128,10 +125,10 @@ void testMissingAuthSecretKey() { properties.put("emr.auth.type", "awssigv4"); properties.put("emr.auth.access_key", "test"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR auth keys are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR auth keys are missing.", exception.getMessage()); 
} @Test @@ -178,5 +175,4 @@ void testSetSparkJars() { DataSource dataSource = new SparkStorageFactory(client, settings).createDataSource(metadata); Assertions.assertTrue(dataSource.getStorageEngine() instanceof SparkStorageEngine); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java index 39bd2eb199..a70d4ba69e 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java @@ -31,26 +31,23 @@ @ExtendWith(MockitoExtension.class) public class SparkTableTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; @Test void testUnsupportedOperation() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); - SparkTable sparkTable = - new SparkTable(client, sparkQueryRequest); + SparkTable sparkTable = new SparkTable(client, sparkQueryRequest); assertThrows(UnsupportedOperationException.class, sparkTable::exists); - assertThrows(UnsupportedOperationException.class, - () -> sparkTable.create(Collections.emptyMap())); + assertThrows( + UnsupportedOperationException.class, () -> sparkTable.create(Collections.emptyMap())); } @Test void testCreateScanBuilderWithSqlTableFunction() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkTable sparkTable = - new SparkTable(client, sparkQueryRequest); + SparkTable sparkTable = new SparkTable(client, sparkQueryRequest); TableScanBuilder tableScanBuilder = sparkTable.createScanBuilder(); Assertions.assertNotNull(tableScanBuilder); Assertions.assertTrue(tableScanBuilder instanceof SparkSqlFunctionTableScanBuilder); @@ -59,8 +56,7 @@ void testCreateScanBuilderWithSqlTableFunction() { @Test @SneakyThrows void testGetFieldTypesFromSparkQueryRequest() { - SparkTable sparkTable - = new SparkTable(client, new SparkQueryRequest()); + SparkTable sparkTable = new SparkTable(client, new SparkQueryRequest()); Map expectedFieldTypes = new HashMap<>(); Map fieldTypes = sparkTable.getFieldTypes(); @@ -73,10 +69,9 @@ void testGetFieldTypesFromSparkQueryRequest() { void testImplementWithSqlFunction() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkTable sparkMetricTable = - new SparkTable(client, sparkQueryRequest); - PhysicalPlan plan = sparkMetricTable.implement( - new SparkSqlFunctionTableScanBuilder(client, sparkQueryRequest)); + SparkTable sparkMetricTable = new SparkTable(client, sparkQueryRequest); + PhysicalPlan plan = + sparkMetricTable.implement(new SparkSqlFunctionTableScanBuilder(client, sparkQueryRequest)); assertTrue(plan instanceof SparkSqlFunctionTableScanOperator); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java b/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java index b480e6d9d9..ca77006d9c 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java +++ b/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java @@ -12,6 +12,7 @@ public class TestUtils { /** * Get Json document from the files in resources folder. + * * @param filename filename. * @return String. * @throws IOException IOException. 
@@ -21,5 +22,4 @@ public static String getJson(String filename) throws IOException { return new String( Objects.requireNonNull(classLoader.getResourceAsStream(filename)).readAllBytes()); } - } diff --git a/sql/build.gradle b/sql/build.gradle index 44dc37cf0f..d85cc4ca74 100644 --- a/sql/build.gradle +++ b/sql/build.gradle @@ -58,6 +58,11 @@ dependencies { testImplementation(testFixtures(project(":core"))) } +// Being ignored as a temporary measure before being removed in favour of +// spotless https://github.com/opensearch-project/sql/issues/1101 +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + test { useJUnitPlatform() testLogging { diff --git a/sql/src/main/java/org/opensearch/sql/sql/SQLService.java b/sql/src/main/java/org/opensearch/sql/sql/SQLService.java index 91ec00cdd5..e1ca778453 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/SQLService.java +++ b/sql/src/main/java/org/opensearch/sql/sql/SQLService.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import java.util.Optional; @@ -21,9 +20,7 @@ import org.opensearch.sql.sql.parser.AstBuilder; import org.opensearch.sql.sql.parser.AstStatementBuilder; -/** - * SQL service. - */ +/** SQL service. */ @RequiredArgsConstructor public class SQLService { @@ -69,15 +66,19 @@ private AbstractPlan plan( if (request.getCursor().isPresent()) { // Handle v2 cursor here -- legacy cursor was handled earlier. if (isExplainRequest) { - throw new UnsupportedOperationException("Explain of a paged query continuation " - + "is not supported. Use `explain` for the initial query request."); + throw new UnsupportedOperationException( + "Explain of a paged query continuation " + + "is not supported. Use `explain` for the initial query request."); } if (request.isCursorCloseRequest()) { - return queryExecutionFactory.createCloseCursor(request.getCursor().get(), - queryListener.orElse(null)); + return queryExecutionFactory.createCloseCursor( + request.getCursor().get(), queryListener.orElse(null)); } - return queryExecutionFactory.create(request.getCursor().get(), - isExplainRequest, queryListener.orElse(null), explainListener.orElse(null)); + return queryExecutionFactory.create( + request.getCursor().get(), + isExplainRequest, + queryListener.orElse(null), + explainListener.orElse(null)); } else { // 1.Parse query and convert parse tree (CST) to abstract syntax tree (AST) ParseTree cst = parser.parse(request.getQuery()); @@ -90,8 +91,7 @@ private AbstractPlan plan( .fetchSize(request.getFetchSize()) .build())); - return queryExecutionFactory.create( - statement, queryListener, explainListener); + return queryExecutionFactory.create(statement, queryListener, explainListener); } } } diff --git a/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java b/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java index bd7b5cbedf..0d1b89f7a9 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java +++ b/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import static org.opensearch.sql.sql.antlr.parser.OpenSearchSQLLexer.BACKTICK_QUOTE_ID; @@ -31,21 +30,17 @@ import org.antlr.v4.runtime.tree.ParseTreeListener; import org.antlr.v4.runtime.tree.TerminalNode; -/** - * Parse tree listener for anonymizing SQL requests. - */ +/** Parse tree listener for anonymizing SQL requests. 
*/ public class AnonymizerListener implements ParseTreeListener { private String anonymizedQueryString = ""; private static final int NO_TYPE = -1; private int previousType = NO_TYPE; @Override - public void enterEveryRule(ParserRuleContext ctx) { - } + public void enterEveryRule(ParserRuleContext ctx) {} @Override - public void exitEveryRule(ParserRuleContext ctx) { - } + public void exitEveryRule(ParserRuleContext ctx) {} @Override public void visitTerminal(TerminalNode node) { @@ -57,10 +52,11 @@ public void visitTerminal(TerminalNode node) { int token = node.getSymbol().getType(); boolean isDotIdentifiers = token == DOT || previousType == DOT; boolean isComma = token == COMMA; - boolean isEqualComparison = ((token == EQUAL_SYMBOL) + boolean isEqualComparison = + ((token == EQUAL_SYMBOL) && (previousType == LESS_SYMBOL - || previousType == GREATER_SYMBOL - || previousType == EXCLAMATION_SYMBOL)); + || previousType == GREATER_SYMBOL + || previousType == EXCLAMATION_SYMBOL)); boolean isNotEqualComparisonAlternative = previousType == LESS_SYMBOL && token == GREATER_SYMBOL; if (!isDotIdentifiers && !isComma && !isEqualComparison && !isNotEqualComparisonAlternative) { @@ -103,9 +99,7 @@ public void visitTerminal(TerminalNode node) { } @Override - public void visitErrorNode(ErrorNode node) { - - } + public void visitErrorNode(ErrorNode node) {} public String getAnonymizedQueryString() { return "(" + anonymizedQueryString + ")"; diff --git a/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java b/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java index 4f7b925718..d1a6adc236 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java +++ b/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import org.antlr.v4.runtime.CommonTokenStream; @@ -16,16 +15,15 @@ import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLLexer; import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser; -/** - * SQL syntax parser which encapsulates an ANTLR parser. - */ +/** SQL syntax parser which encapsulates an ANTLR parser. */ public class SQLSyntaxParser implements Parser { private static final Logger LOG = LogManager.getLogger(SQLSyntaxParser.class); /** * Parse a SQL query by ANTLR parser. - * @param query a SQL query - * @return parse tree root + * + * @param query a SQL query + * @return parse tree root */ @Override public ParseTree parse(String query) { diff --git a/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java b/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java index c9321f5775..4e902cb67d 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java +++ b/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.domain; import java.util.Collections; @@ -20,43 +19,30 @@ import org.json.JSONObject; import org.opensearch.sql.protocol.response.format.Format; -/** - * SQL query request. - */ +/** SQL query request. 
*/ @ToString @EqualsAndHashCode @RequiredArgsConstructor public class SQLQueryRequest { private static final String QUERY_FIELD_CURSOR = "cursor"; - private static final Set SUPPORTED_FIELDS = Set.of( - "query", "fetch_size", "parameters", QUERY_FIELD_CURSOR); + private static final Set SUPPORTED_FIELDS = + Set.of("query", "fetch_size", "parameters", QUERY_FIELD_CURSOR); private static final String QUERY_PARAMS_FORMAT = "format"; private static final String QUERY_PARAMS_SANITIZE = "sanitize"; - /** - * JSON payload in REST request. - */ + /** JSON payload in REST request. */ private final JSONObject jsonContent; - /** - * SQL query. - */ - @Getter - private final String query; + /** SQL query. */ + @Getter private final String query; - /** - * Request path. - */ + /** Request path. */ private final String path; - /** - * Request format. - */ + /** Request format. */ private final String format; - /** - * Request params. - */ + /** Request params. */ private Map params = Collections.emptyMap(); @Getter @@ -65,11 +51,13 @@ public class SQLQueryRequest { private String cursor; - /** - * Constructor of SQLQueryRequest that passes request params. - */ - public SQLQueryRequest(JSONObject jsonContent, String query, String path, - Map params, String cursor) { + /** Constructor of SQLQueryRequest that passes request params. */ + public SQLQueryRequest( + JSONObject jsonContent, + String query, + String path, + Map params, + String cursor) { this.jsonContent = jsonContent; this.query = query; this.path = path; @@ -80,24 +68,30 @@ public SQLQueryRequest(JSONObject jsonContent, String query, String path, } /** + * + * + *
    * Pre-check if the request can be supported by meeting ALL the following criteria:
    *  1.Only supported fields present in request body, ex. "filter" and "cursor" are not supported
    *  2.Response format is default or can be supported.
+   * </pre>
* * @return true if supported. */ public boolean isSupported() { var noCursor = !isCursor(); var noQuery = query == null; - var noUnsupportedParams = params.isEmpty() - || (params.size() == 1 && params.containsKey(QUERY_PARAMS_FORMAT)); + var noUnsupportedParams = + params.isEmpty() || (params.size() == 1 && params.containsKey(QUERY_PARAMS_FORMAT)); var noContent = jsonContent == null || jsonContent.isEmpty(); - return ((!noCursor && noQuery - && noUnsupportedParams && noContent) // if cursor is given, but other things - || (noCursor && !noQuery)) // or if cursor is not given, but query - && isOnlySupportedFieldInPayload() // and request has supported fields only - && isSupportedFormat(); // and request is in supported format + return ((!noCursor + && noQuery + && noUnsupportedParams + && noContent) // if cursor is given, but other things + || (noCursor && !noQuery)) // or if cursor is not given, but query + && isOnlySupportedFieldInPayload() // and request has supported fields only + && isSupportedFormat(); // and request is in supported format } private boolean isCursor() { @@ -106,6 +100,7 @@ private boolean isCursor() { /** * Check if request is to explain rather than execute the query. + * * @return true if it is an explain request */ public boolean isExplainRequest() { @@ -116,16 +111,14 @@ public boolean isCursorCloseRequest() { return path.endsWith("/close"); } - /** - * Decide on the formatter by the requested format. - */ + /** Decide on the formatter by the requested format. */ public Format format() { Optional optionalFormat = Format.of(format); if (optionalFormat.isPresent()) { return optionalFormat.get(); } else { throw new IllegalArgumentException( - String.format(Locale.ROOT,"response in %s format is not supported.", format)); + String.format(Locale.ROOT, "response in %s format is not supported.", format)); } } @@ -155,5 +148,4 @@ private boolean shouldSanitize(Map params) { } return true; } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java index bd4464d00e..e46147b7a3 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static java.util.Collections.emptyList; @@ -27,6 +26,8 @@ import org.opensearch.sql.sql.parser.context.QuerySpecification; /** + * + * *
SelectExpressionAnalyzerTest
  * AST aggregation builder that builds AST aggregation node for the following scenarios:
  *
@@ -59,9 +60,7 @@
 @RequiredArgsConstructor
 public class AstAggregationBuilder extends OpenSearchSQLParserBaseVisitor<UnresolvedPlan> {
 
-  /**
-   * Query specification that contains info collected beforehand.
-   */
+  /** Query specification that contains info collected beforehand. */
   private final QuerySpecification querySpec;
 
   @Override
@@ -78,10 +77,7 @@ public UnresolvedPlan visit(ParseTree groupByClause) {
 
   private UnresolvedPlan buildExplicitAggregation() {
     List<UnresolvedExpression> groupByItems = replaceGroupByItemIfAliasOrOrdinal();
-    return new Aggregation(
-        new ArrayList<>(querySpec.getAggregators()),
-        emptyList(),
-        groupByItems);
+    return new Aggregation(new ArrayList<>(querySpec.getAggregators()), emptyList(), groupByItems);
   }
 
   private UnresolvedPlan buildImplicitAggregation() {
@@ -89,33 +85,32 @@ private UnresolvedPlan buildImplicitAggregation() {
 
     if (invalidSelectItem.isPresent()) {
       // Report semantic error to avoid fall back to old engine again
-      throw new SemanticCheckException(StringUtils.format(
-          "Explicit GROUP BY clause is required because expression [%s] "
-              + "contains non-aggregated column", invalidSelectItem.get()));
+      throw new SemanticCheckException(
+          StringUtils.format(
+              "Explicit GROUP BY clause is required because expression [%s] "
+                  + "contains non-aggregated column",
+              invalidSelectItem.get()));
     }
 
     return new Aggregation(
-        new ArrayList<>(querySpec.getAggregators()),
-        emptyList(),
-        querySpec.getGroupByItems());
+        new ArrayList<>(querySpec.getAggregators()), emptyList(), querySpec.getGroupByItems());
   }
 
   private List<UnresolvedExpression> replaceGroupByItemIfAliasOrOrdinal() {
-    return querySpec.getGroupByItems()
-                    .stream()
-                    .map(querySpec::replaceIfAliasOrOrdinal)
-                    .map(expr -> new Alias(expr.toString(), expr))
-                    .collect(Collectors.toList());
+    return querySpec.getGroupByItems().stream()
+        .map(querySpec::replaceIfAliasOrOrdinal)
+        .map(expr -> new Alias(expr.toString(), expr))
+        .collect(Collectors.toList());
   }
 
   /**
-   * Find non-aggregate item in SELECT clause. Note that literal is special which is not required
-   * to be applied by aggregate function.
+   * Find non-aggregate item in SELECT clause. Note that literal is special which is not required to
+   * be applied by aggregate function.
    */
   private Optional<UnresolvedExpression> findNonAggregatedItemInSelect() {
     return querySpec.getSelectItems().stream()
-                                     .filter(this::isNonAggregateOrLiteralExpression)
-                                     .findFirst();
+        .filter(this::isNonAggregateOrLiteralExpression)
+        .findFirst();
   }
 
   private boolean isAggregatorNotFoundAnywhere() {
@@ -132,8 +127,7 @@ private boolean isNonAggregateOrLiteralExpression(UnresolvedExpression expr) {
     }
 
     List<? extends Node> children = expr.getChild();
-    return children.stream().anyMatch(child ->
-        isNonAggregateOrLiteralExpression((UnresolvedExpression) child));
+    return children.stream()
+        .anyMatch(child -> isNonAggregateOrLiteralExpression((UnresolvedExpression) child));
   }
-
 }
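
The hunks above reshape how AstAggregationBuilder resolves GROUP BY items that refer to a select alias or an ordinal before wrapping them in Alias nodes and building the Aggregation node. The following is a minimal standalone sketch of that resolution step only, using a hypothetical SELECT_ITEMS list and String-based stand-ins rather than the project's QuerySpecification and AST classes.

// Minimal sketch (not project code): mirrors the idea behind
// replaceGroupByItemIfAliasOrOrdinal(). A GROUP BY item that is an
// ordinal ("1") or a select alias ("s") is replaced by the underlying
// select expression before being wrapped in an alias.
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class GroupByResolutionSketch {

  /** Hypothetical select list: alias -> expression text, in SELECT order. */
  private static final List<Map.Entry<String, String>> SELECT_ITEMS =
      List.of(
          Map.entry("s", "state"),
          Map.entry("avg_age", "AVG(age)"));

  /** Resolve an ordinal (1-based) or alias to the underlying expression text. */
  static String replaceIfAliasOrOrdinal(String groupByItem) {
    if (groupByItem.matches("\\d+")) { // ordinal, e.g. GROUP BY 1
      return SELECT_ITEMS.get(Integer.parseInt(groupByItem) - 1).getValue();
    }
    return SELECT_ITEMS.stream() // alias, e.g. GROUP BY s
        .filter(e -> e.getKey().equals(groupByItem))
        .map(Map.Entry::getValue)
        .findFirst()
        .orElse(groupByItem); // plain column name stays as-is
  }

  public static void main(String[] args) {
    // "GROUP BY 1" and "GROUP BY s" both end up grouping on "state".
    List<String> resolved =
        List.of("1", "s", "city").stream()
            .map(GroupByResolutionSketch::replaceIfAliasOrOrdinal)
            .collect(Collectors.toList());
    System.out.println(resolved); // [state, state, city]
  }
}

For a query like SELECT state AS s, AVG(age) FROM accounts GROUP BY 1, this is why GROUP BY 1 and GROUP BY s group on the same column; a select item that is neither aggregated nor grouped still trips the SemanticCheckException path shown above.
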
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java
index 020889c082..ab96f16263 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static java.util.Collections.emptyList;
@@ -43,22 +42,18 @@
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParserBaseVisitor;
 import org.opensearch.sql.sql.parser.context.ParsingContext;
 
-/**
- * Abstract syntax tree (AST) builder.
- */
+/** Abstract syntax tree (AST) builder. */
 @RequiredArgsConstructor
 public class AstBuilder extends OpenSearchSQLParserBaseVisitor<UnresolvedPlan> {
 
   private final AstExpressionBuilder expressionBuilder = new AstExpressionBuilder();
 
-  /**
-   * Parsing context stack that contains context for current query parsing.
-   */
+  /** Parsing context stack that contains context for current query parsing. */
   private final ParsingContext context = new ParsingContext();
 
   /**
-   * SQL query to get original token text. This is necessary because token.getText() returns
-   * text without whitespaces or other characters discarded by lexer.
+   * SQL query to get original token text. This is necessary because token.getText() returns text
+   * without whitespaces or other characters discarded by lexer.
    */
   private final String query;
 
@@ -91,8 +86,7 @@ public UnresolvedPlan visitQuerySpecification(QuerySpecificationContext queryCon
 
     if (queryContext.fromClause() == null) {
       Optional<UnresolvedExpression> allFields =
-          project.getProjectList().stream().filter(node -> node instanceof AllFields)
-              .findFirst();
+          project.getProjectList().stream().filter(node -> node instanceof AllFields).findFirst();
       if (allFields.isPresent()) {
         throw new SyntaxCheckException("No FROM clause found for select all");
       }
@@ -119,9 +113,8 @@ public UnresolvedPlan visitQuerySpecification(QuerySpecificationContext queryCon
 
   @Override
   public UnresolvedPlan visitSelectClause(SelectClauseContext ctx) {
-    ImmutableList.Builder<UnresolvedExpression> builder =
-        new ImmutableList.Builder<>();
-    if (ctx.selectElements().star != null) { //TODO: project operator should be required?
+    ImmutableList.Builder<UnresolvedExpression> builder = new ImmutableList.Builder<>();
+    if (ctx.selectElements().star != null) { // TODO: project operator should be required?
       builder.add(AllFields.of());
     }
     ctx.selectElements().selectElement().forEach(field -> builder.add(visitSelectItem(field)));
@@ -132,8 +125,7 @@ public UnresolvedPlan visitSelectClause(SelectClauseContext ctx) {
   public UnresolvedPlan visitLimitClause(OpenSearchSQLParser.LimitClauseContext ctx) {
     return new Limit(
         Integer.parseInt(ctx.limit.getText()),
-        ctx.offset == null ? 0 : Integer.parseInt(ctx.offset.getText())
-    );
+        ctx.offset == null ? 0 : Integer.parseInt(ctx.offset.getText()));
   }
 
   @Override
@@ -165,29 +157,26 @@ public UnresolvedPlan visitFromClause(FromClauseContext ctx) {
   }
 
   /**
-   * Ensure NESTED function is not used in HAVING clause and fallback to legacy engine.
-   * Can remove when support is added for NESTED function in HAVING clause.
+   * Ensure NESTED function is not used in HAVING clause and fallback to legacy engine. Can remove
+   * when support is added for NESTED function in HAVING clause.
+   *
    * @param func : Function in HAVING clause
    */
   private void verifySupportsCondition(UnresolvedExpression func) {
     if (func instanceof Function) {
-      if (((Function) func).getFuncName().equalsIgnoreCase(
-          BuiltinFunctionName.NESTED.name()
-      )) {
+      if (((Function) func).getFuncName().equalsIgnoreCase(BuiltinFunctionName.NESTED.name())) {
         throw new SyntaxCheckException(
-            "Falling back to legacy engine. Nested function is not supported in the HAVING clause."
-        );
+            "Falling back to legacy engine. Nested function is not supported in the HAVING"
+                + " clause.");
       }
-      ((Function)func).getFuncArgs().stream()
-          .forEach(e -> verifySupportsCondition(e)
-      );
+      ((Function) func).getFuncArgs().stream().forEach(e -> verifySupportsCondition(e));
     }
   }
 
   @Override
   public UnresolvedPlan visitTableAsRelation(TableAsRelationContext ctx) {
-    String tableAlias = (ctx.alias() == null) ? null
-        : StringUtils.unquoteIdentifier(ctx.alias().getText());
+    String tableAlias =
+        (ctx.alias() == null) ? null : StringUtils.unquoteIdentifier(ctx.alias().getText());
     return new Relation(visitAstExpression(ctx.tableName()), tableAlias);
   }
 
@@ -228,5 +217,4 @@ private UnresolvedExpression visitSelectItem(SelectElementContext ctx) {
       return new Alias(name, expr, alias);
     }
   }
-
 }
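
verifySupportsCondition above walks a HAVING condition recursively and forces a fallback to the legacy engine as soon as a NESTED call appears at any depth. Below is a minimal, self-contained sketch of that walk; the Func record and IllegalStateException are stand-ins for the project's Function expression and SyntaxCheckException, not the real types.

// Minimal sketch (not project code): recursive rejection of NESTED
// anywhere inside a HAVING condition, reduced to a tiny tree type.
import java.util.List;

public class HavingNestedCheckSketch {

  /** Stand-in for an unresolved function expression. */
  record Func(String name, List<Func> args) {}

  static void verifySupportsCondition(Func func) {
    if (func.name().equalsIgnoreCase("NESTED")) {
      throw new IllegalStateException(
          "Falling back to legacy engine. Nested function is not supported in the HAVING clause.");
    }
    // Recurse into the arguments so nested(...) is caught at any depth.
    func.args().forEach(HavingNestedCheckSketch::verifySupportsCondition);
  }

  public static void main(String[] args) {
    // HAVING sum(nested(message.info)) > 1  -> rejected
    Func having =
        new Func(
            ">",
            List.of(
                new Func("sum", List.of(new Func("nested", List.of()))),
                new Func("1", List.of())));
    try {
      verifySupportsCondition(having);
    } catch (IllegalStateException e) {
      System.out.println(e.getMessage());
    }
  }
}
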
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java
index 192514250b..6dd1e02a1d 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.opensearch.sql.ast.dsl.AstDSL.between;
@@ -113,9 +112,7 @@
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.TableNameContext;
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParserBaseVisitor;
 
-/**
- * Expression builder to parse text to expression in AST.
- */
+/** Expression builder to parse text to expression in AST. */
 public class AstExpressionBuilder extends OpenSearchSQLParserBaseVisitor<UnresolvedExpression> {
 
   @Override
@@ -141,9 +138,7 @@ public UnresolvedExpression visitQualifiedName(QualifiedNameContext ctx) {
   @Override
   public UnresolvedExpression visitMathExpressionAtom(MathExpressionAtomContext ctx) {
     return new Function(
-        ctx.mathOperator.getText(),
-        Arrays.asList(visit(ctx.left), visit(ctx.right))
-    );
+        ctx.mathOperator.getText(), Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
@@ -152,11 +147,8 @@ public UnresolvedExpression visitNestedExpressionAtom(NestedExpressionAtomContex
   }
 
   @Override
-  public UnresolvedExpression visitNestedAllFunctionCall(
-      NestedAllFunctionCallContext ctx) {
-    return new NestedAllTupleFields(
-        visitQualifiedName(ctx.allTupleFields().path).toString()
-    );
+  public UnresolvedExpression visitNestedAllFunctionCall(NestedAllFunctionCallContext ctx) {
+    return new NestedAllTupleFields(visitQualifiedName(ctx.allTupleFields().path).toString());
   }
 
   @Override
@@ -167,39 +159,36 @@ public UnresolvedExpression visitScalarFunctionCall(ScalarFunctionCallContext ct
   @Override
   public UnresolvedExpression visitGetFormatFunctionCall(GetFormatFunctionCallContext ctx) {
     return new Function(
-        ctx.getFormatFunction().GET_FORMAT().toString(),
-        getFormatFunctionArguments(ctx));
+        ctx.getFormatFunction().GET_FORMAT().toString(), getFormatFunctionArguments(ctx));
   }
 
   @Override
-  public UnresolvedExpression visitHighlightFunctionCall(
-      HighlightFunctionCallContext ctx) {
+  public UnresolvedExpression visitHighlightFunctionCall(HighlightFunctionCallContext ctx) {
     ImmutableMap.Builder<String, Literal> builder = ImmutableMap.builder();
-    ctx.highlightFunction().highlightArg().forEach(v -> builder.put(
-        v.highlightArgName().getText().toLowerCase(),
-        new Literal(StringUtils.unquoteText(v.highlightArgValue().getText()),
-            DataType.STRING))
-    );
+    ctx.highlightFunction()
+        .highlightArg()
+        .forEach(
+            v ->
+                builder.put(
+                    v.highlightArgName().getText().toLowerCase(),
+                    new Literal(
+                        StringUtils.unquoteText(v.highlightArgValue().getText()),
+                        DataType.STRING)));
 
-    return new HighlightFunction(visit(ctx.highlightFunction().relevanceField()),
-        builder.build());
+    return new HighlightFunction(visit(ctx.highlightFunction().relevanceField()), builder.build());
   }
 
-
   @Override
   public UnresolvedExpression visitTimestampFunctionCall(TimestampFunctionCallContext ctx) {
     return new Function(
-        ctx.timestampFunction().timestampFunctionName().getText(),
-        timestampFunctionArguments(ctx));
+        ctx.timestampFunction().timestampFunctionName().getText(), timestampFunctionArguments(ctx));
   }
 
   @Override
-  public UnresolvedExpression visitPositionFunction(
-          PositionFunctionContext ctx) {
+  public UnresolvedExpression visitPositionFunction(PositionFunctionContext ctx) {
     return new Function(
-            POSITION.getName().getFunctionName(),
-            Arrays.asList(visitFunctionArg(ctx.functionArg(0)),
-                visitFunctionArg(ctx.functionArg(1))));
+        POSITION.getName().getFunctionName(),
+        Arrays.asList(visitFunctionArg(ctx.functionArg(0)), visitFunctionArg(ctx.functionArg(1))));
   }
 
   @Override
@@ -217,8 +206,7 @@ public UnresolvedExpression visitColumnFilter(ColumnFilterContext ctx) {
   }
 
   @Override
-  public UnresolvedExpression visitShowDescribePattern(
-      ShowDescribePatternContext ctx) {
+  public UnresolvedExpression visitShowDescribePattern(ShowDescribePatternContext ctx) {
     return visit(ctx.stringLiteral());
   }
 
@@ -235,21 +223,18 @@ public UnresolvedExpression visitWindowFunctionClause(WindowFunctionClauseContex
 
     List<UnresolvedExpression> partitionByList = Collections.emptyList();
     if (overClause.partitionByClause() != null) {
-      partitionByList = overClause.partitionByClause()
-                                  .expression()
-                                  .stream()
-                                  .map(this::visit)
-                                  .collect(Collectors.toList());
+      partitionByList =
+          overClause.partitionByClause().expression().stream()
+              .map(this::visit)
+              .collect(Collectors.toList());
     }
 
     List<Pair<SortOption, UnresolvedExpression>> sortList = Collections.emptyList();
     if (overClause.orderByClause() != null) {
-      sortList = overClause.orderByClause()
-                           .orderByElement()
-                           .stream()
-                           .map(item -> ImmutablePair.of(
-                               createSortOption(item), visit(item.expression())))
-                           .collect(Collectors.toList());
+      sortList =
+          overClause.orderByClause().orderByElement().stream()
+              .map(item -> ImmutablePair.of(createSortOption(item), visit(item.expression())))
+              .collect(Collectors.toList());
     }
     return new WindowFunction(visit(ctx.function), partitionByList, sortList);
   }
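
The reformatted visitWindowFunctionClause collects the OVER clause into two lists, the PARTITION BY expressions and the (sort option, expression) pairs from ORDER BY, which then feed the WindowFunction node. A minimal sketch of that collection step, assuming plain JDK map entries and a hypothetical WindowFunctionNode record instead of the project's ImmutablePair, SortOption, and WindowFunction types:

// Minimal sketch (not project code): gathering PARTITION BY and ORDER BY
// parts of an OVER clause into the two lists built above.
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class OverClauseSketch {

  /** Stand-in for one ORDER BY element inside an OVER clause. */
  record OrderByElement(String expression, boolean desc) {}

  /** Stand-in for the window function AST node: function text plus the two lists. */
  record WindowFunctionNode(
      String function, List<String> partitionByList, List<Map.Entry<String, String>> sortList) {}

  public static void main(String[] args) {
    // RANK() OVER (PARTITION BY state ORDER BY age DESC)
    List<String> partitionBy = List.of("state");
    List<OrderByElement> orderBy = List.of(new OrderByElement("age", true));

    // Sort entries mirror ImmutablePair.of(createSortOption(item), visit(item.expression())).
    List<Map.Entry<String, String>> sortList =
        orderBy.stream()
            .map(item -> Map.entry(item.desc() ? "DESC" : "ASC", item.expression()))
            .collect(Collectors.toList());

    WindowFunctionNode node = new WindowFunctionNode("RANK()", partitionBy, sortList);
    System.out.println(node);
    // WindowFunctionNode[function=RANK(), partitionByList=[state], sortList=[DESC=age]]
  }
}
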
@@ -262,17 +247,12 @@ public UnresolvedExpression visitScalarWindowFunction(ScalarWindowFunctionContex
   @Override
   public UnresolvedExpression visitRegularAggregateFunctionCall(
       RegularAggregateFunctionCallContext ctx) {
-    return new AggregateFunction(
-        ctx.functionName.getText(),
-        visitFunctionArg(ctx.functionArg()));
+    return new AggregateFunction(ctx.functionName.getText(), visitFunctionArg(ctx.functionArg()));
   }
 
   @Override
   public UnresolvedExpression visitDistinctCountFunctionCall(DistinctCountFunctionCallContext ctx) {
-    return new AggregateFunction(
-        ctx.COUNT().getText(),
-        visitFunctionArg(ctx.functionArg()),
-        true);
+    return new AggregateFunction(ctx.COUNT().getText(), visitFunctionArg(ctx.functionArg()), true);
   }
 
   @Override
@@ -288,18 +268,16 @@ public UnresolvedExpression visitFilterClause(FilterClauseContext ctx) {
   @Override
   public UnresolvedExpression visitIsNullPredicate(IsNullPredicateContext ctx) {
     return new Function(
-        ctx.nullNotnull().NOT() == null ? IS_NULL.getName().getFunctionName() :
-            IS_NOT_NULL.getName().getFunctionName(),
+        ctx.nullNotnull().NOT() == null
+            ? IS_NULL.getName().getFunctionName()
+            : IS_NOT_NULL.getName().getFunctionName(),
         Arrays.asList(visit(ctx.predicate())));
   }
 
   @Override
   public UnresolvedExpression visitBetweenPredicate(BetweenPredicateContext ctx) {
     UnresolvedExpression func =
-        between(
-            visit(ctx.predicate(0)),
-            visit(ctx.predicate(1)),
-            visit(ctx.predicate(2)));
+        between(visit(ctx.predicate(0)), visit(ctx.predicate(1)), visit(ctx.predicate(2)));
 
     if (ctx.NOT() != null) {
       func = not(func);
@@ -310,26 +288,21 @@ public UnresolvedExpression visitBetweenPredicate(BetweenPredicateContext ctx) {
   @Override
   public UnresolvedExpression visitLikePredicate(LikePredicateContext ctx) {
     return new Function(
-        ctx.NOT() == null ? LIKE.getName().getFunctionName() :
-            NOT_LIKE.getName().getFunctionName(),
+        ctx.NOT() == null ? LIKE.getName().getFunctionName() : NOT_LIKE.getName().getFunctionName(),
         Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
   public UnresolvedExpression visitRegexpPredicate(RegexpPredicateContext ctx) {
-    return new Function(REGEXP.getName().getFunctionName(),
-            Arrays.asList(visit(ctx.left), visit(ctx.right)));
+    return new Function(
+        REGEXP.getName().getFunctionName(), Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
   public UnresolvedExpression visitInPredicate(InPredicateContext ctx) {
     UnresolvedExpression field = visit(ctx.predicate());
-    List<UnresolvedExpression> inLists = ctx
-        .expressions()
-        .expression()
-        .stream()
-        .map(this::visit)
-        .collect(Collectors.toList());
+    List<UnresolvedExpression> inLists =
+        ctx.expressions().expression().stream().map(this::visit).collect(Collectors.toList());
     UnresolvedExpression in = AstDSL.in(field, inLists);
     return ctx.NOT() != null ? AstDSL.not(in) : in;
   }
@@ -394,34 +367,30 @@ public UnresolvedExpression visitTimeLiteral(TimeLiteralContext ctx) {
   }
 
   @Override
-  public UnresolvedExpression visitTimestampLiteral(
-      TimestampLiteralContext ctx) {
+  public UnresolvedExpression visitTimestampLiteral(TimestampLiteralContext ctx) {
     return AstDSL.timestampLiteral(StringUtils.unquoteText(ctx.timestamp.getText()));
   }
 
   @Override
   public UnresolvedExpression visitIntervalLiteral(IntervalLiteralContext ctx) {
-    return new Interval(
-        visit(ctx.expression()), IntervalUnit.of(ctx.intervalUnit().getText()));
+    return new Interval(visit(ctx.expression()), IntervalUnit.of(ctx.intervalUnit().getText()));
   }
 
   @Override
-  public UnresolvedExpression visitBinaryComparisonPredicate(
-      BinaryComparisonPredicateContext ctx) {
+  public UnresolvedExpression visitBinaryComparisonPredicate(BinaryComparisonPredicateContext ctx) {
     String functionName = ctx.comparisonOperator().getText();
     return new Function(
         functionName.equals("<>") ? "!=" : functionName,
-        Arrays.asList(visit(ctx.left), visit(ctx.right))
-    );
+        Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
   public UnresolvedExpression visitCaseFunctionCall(CaseFunctionCallContext ctx) {
     UnresolvedExpression caseValue = (ctx.expression() == null) ? null : visit(ctx.expression());
-    List<When> whenStatements = ctx.caseFuncAlternative()
-                                   .stream()
-                                   .map(when -> (When) visit(when))
-                                   .collect(Collectors.toList());
+    List<When> whenStatements =
+        ctx.caseFuncAlternative().stream()
+            .map(when -> (When) visit(when))
+            .collect(Collectors.toList());
     UnresolvedExpression elseStatement = (ctx.elseArg == null) ? null : visit(ctx.elseArg);
 
     return new Case(caseValue, whenStatements, elseStatement);
@@ -433,23 +402,19 @@ public UnresolvedExpression visitCaseFuncAlternative(CaseFuncAlternativeContext
   }
 
   @Override
-  public UnresolvedExpression visitDataTypeFunctionCall(
-      DataTypeFunctionCallContext ctx) {
+  public UnresolvedExpression visitDataTypeFunctionCall(DataTypeFunctionCallContext ctx) {
     return new Cast(visit(ctx.expression()), visit(ctx.convertedDataType()));
   }
 
   @Override
-  public UnresolvedExpression visitConvertedDataType(
-      ConvertedDataTypeContext ctx) {
+  public UnresolvedExpression visitConvertedDataType(ConvertedDataTypeContext ctx) {
     return AstDSL.stringLiteral(ctx.getText());
   }
 
   @Override
-  public UnresolvedExpression visitNoFieldRelevanceFunction(
-          NoFieldRelevanceFunctionContext ctx) {
+  public UnresolvedExpression visitNoFieldRelevanceFunction(NoFieldRelevanceFunctionContext ctx) {
     return new Function(
-            ctx.noFieldRelevanceFunctionName().getText().toLowerCase(),
-            noFieldRelevanceArguments(ctx));
+        ctx.noFieldRelevanceFunctionName().getText().toLowerCase(), noFieldRelevanceArguments(ctx));
   }
 
   @Override
@@ -475,10 +440,9 @@ public UnresolvedExpression visitMultiFieldRelevanceFunction(
     // 'MULTI_MATCH('query'='query_val', 'fields'='*fields_val')'
     String funcName = StringUtils.unquoteText(ctx.multiFieldRelevanceFunctionName().getText());
     if ((funcName.equalsIgnoreCase(BuiltinFunctionName.MULTI_MATCH.toString())
-        || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCH.toString())
-        || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCHQUERY.toString()))
-        && !ctx.getRuleContexts(AlternateMultiMatchQueryContext.class)
-        .isEmpty()) {
+            || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCH.toString())
+            || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCHQUERY.toString()))
+        && !ctx.getRuleContexts(AlternateMultiMatchQueryContext.class).isEmpty()) {
       return new Function(
           ctx.multiFieldRelevanceFunctionName().getText().toLowerCase(),
           alternateMultiMatchArguments(ctx));
@@ -511,78 +475,81 @@ public UnresolvedExpression visitScoreRelevanceFunction(ScoreRelevanceFunctionCo
     return new ScoreFunction(visit(ctx.relevanceFunction()), weight);
   }
 
-  private Function buildFunction(String functionName,
-                                 List<FunctionArgContext> arg) {
+  private Function buildFunction(String functionName, List<FunctionArgContext> arg) {
     return new Function(
-        functionName,
-        arg
-            .stream()
-            .map(this::visitFunctionArg)
-            .collect(Collectors.toList())
-    );
+        functionName, arg.stream().map(this::visitFunctionArg).collect(Collectors.toList()));
   }
 
   @Override
   public UnresolvedExpression visitExtractFunctionCall(ExtractFunctionCallContext ctx) {
     return new Function(
-        ctx.extractFunction().EXTRACT().toString(),
-        getExtractFunctionArguments(ctx));
+        ctx.extractFunction().EXTRACT().toString(), getExtractFunctionArguments(ctx));
   }
 
-
   private QualifiedName visitIdentifiers(List identifiers) {
     return new QualifiedName(
         identifiers.stream()
-                   .map(RuleContext::getText)
-                   .map(StringUtils::unquoteIdentifier)
-                   .collect(Collectors.toList()));
+            .map(RuleContext::getText)
+            .map(StringUtils::unquoteIdentifier)
+            .collect(Collectors.toList()));
   }
 
-  private void fillRelevanceArgs(List<RelevanceArgContext> args,
-                                 ImmutableList.Builder<UnresolvedArgument> builder) {
+  private void fillRelevanceArgs(
+      List<RelevanceArgContext> args, ImmutableList.Builder<UnresolvedArgument> builder) {
     // To support old syntax we must support argument keys as quoted strings.
-    args.forEach(v -> builder.add(v.argName == null
-        ? new UnresolvedArgument(v.relevanceArgName().getText().toLowerCase(),
-            new Literal(StringUtils.unquoteText(v.relevanceArgValue().getText()),
-            DataType.STRING))
-        : new UnresolvedArgument(StringUtils.unquoteText(v.argName.getText()).toLowerCase(),
-            new Literal(StringUtils.unquoteText(v.argVal.getText()), DataType.STRING))));
+    args.forEach(
+        v ->
+            builder.add(
+                v.argName == null
+                    ? new UnresolvedArgument(
+                        v.relevanceArgName().getText().toLowerCase(),
+                        new Literal(
+                            StringUtils.unquoteText(v.relevanceArgValue().getText()),
+                            DataType.STRING))
+                    : new UnresolvedArgument(
+                        StringUtils.unquoteText(v.argName.getText()).toLowerCase(),
+                        new Literal(
+                            StringUtils.unquoteText(v.argVal.getText()), DataType.STRING))));
   }
 
   private List<UnresolvedArgument> noFieldRelevanceArguments(
-          NoFieldRelevanceFunctionContext ctx) {
+      NoFieldRelevanceFunctionContext ctx) {
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder<UnresolvedArgument> builder = ImmutableList.builder();
-    builder.add(new UnresolvedArgument("query",
-            new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
 
   private List<UnresolvedArgument> singleFieldRelevanceArguments(
-        SingleFieldRelevanceFunctionContext ctx) {
+      SingleFieldRelevanceFunctionContext ctx) {
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder<UnresolvedArgument> builder = ImmutableList.builder();
-    builder.add(new UnresolvedArgument("field",
-        new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
-    builder.add(new UnresolvedArgument("query",
-        new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "field", new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
 
-
   private List<UnresolvedArgument> altSingleFieldRelevanceFunctionArguments(
       AltSingleFieldRelevanceFunctionContext ctx) {
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder<UnresolvedArgument> builder = ImmutableList.builder();
-    builder.add(new UnresolvedArgument("field",
-        new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
-    builder.add(new UnresolvedArgument("query",
-        new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "field", new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
@@ -592,43 +559,45 @@ private List multiFieldRelevanceArguments(
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder<UnresolvedArgument> builder = ImmutableList.builder();
-    var fields = new RelevanceFieldList(ctx
-        .getRuleContexts(RelevanceFieldAndWeightContext.class)
-        .stream()
-        .collect(Collectors.toMap(
-            f -> StringUtils.unquoteText(f.field.getText()),
-            f -> (f.weight == null) ? 1F : Float.parseFloat(f.weight.getText()))));
+    var fields =
+        new RelevanceFieldList(
+            ctx.getRuleContexts(RelevanceFieldAndWeightContext.class).stream()
+                .collect(
+                    Collectors.toMap(
+                        f -> StringUtils.unquoteText(f.field.getText()),
+                        f -> (f.weight == null) ? 1F : Float.parseFloat(f.weight.getText()))));
     builder.add(new UnresolvedArgument("fields", fields));
-    builder.add(new UnresolvedArgument("query",
-        new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
 
-  private List<UnresolvedExpression> getFormatFunctionArguments(
-      GetFormatFunctionCallContext ctx) {
-    List<UnresolvedExpression> args = Arrays.asList(
-        new Literal(ctx.getFormatFunction().getFormatType().getText(), DataType.STRING),
-        visitFunctionArg(ctx.getFormatFunction().functionArg())
-    );
+  private List<UnresolvedExpression> getFormatFunctionArguments(GetFormatFunctionCallContext ctx) {
+    List<UnresolvedExpression> args =
+        Arrays.asList(
+            new Literal(ctx.getFormatFunction().getFormatType().getText(), DataType.STRING),
+            visitFunctionArg(ctx.getFormatFunction().functionArg()));
     return args;
   }
 
-  private List<UnresolvedExpression> timestampFunctionArguments(
-      TimestampFunctionCallContext ctx) {
-    List<UnresolvedExpression> args = Arrays.asList(
-        new Literal(
-            ctx.timestampFunction().simpleDateTimePart().getText(),
-            DataType.STRING),
-        visitFunctionArg(ctx.timestampFunction().firstArg),
-        visitFunctionArg(ctx.timestampFunction().secondArg)
-    );
+  private List<UnresolvedExpression> timestampFunctionArguments(TimestampFunctionCallContext ctx) {
+    List<UnresolvedExpression> args =
+        Arrays.asList(
+            new Literal(ctx.timestampFunction().simpleDateTimePart().getText(), DataType.STRING),
+            visitFunctionArg(ctx.timestampFunction().firstArg),
+            visitFunctionArg(ctx.timestampFunction().secondArg));
     return args;
   }
 
   /**
+   *
+   *
+   * <pre>
    * Adds support for multi_match alternate syntax like
    * MULTI_MATCH('query'='Dale', 'fields'='*name').
+   * </pre>
* * @param ctx : Context for multi field relevance function. * @return : Returns list of all arguments for relevance function. @@ -640,25 +609,32 @@ private List alternateMultiMatchArguments( ImmutableList.Builder builder = ImmutableList.builder(); Map fieldAndWeightMap = new HashMap<>(); - String[] fieldAndWeights = StringUtils.unquoteText( - ctx.getRuleContexts(AlternateMultiMatchFieldContext.class) - .stream().findFirst().get().argVal.getText()).split(","); + String[] fieldAndWeights = + StringUtils.unquoteText( + ctx.getRuleContexts(AlternateMultiMatchFieldContext.class).stream() + .findFirst() + .get() + .argVal + .getText()) + .split(","); for (var fieldAndWeight : fieldAndWeights) { String[] splitFieldAndWeights = fieldAndWeight.split("\\^"); - fieldAndWeightMap.put(splitFieldAndWeights[0], + fieldAndWeightMap.put( + splitFieldAndWeights[0], splitFieldAndWeights.length > 1 ? Float.parseFloat(splitFieldAndWeights[1]) : 1F); } - builder.add(new UnresolvedArgument("fields", - new RelevanceFieldList(fieldAndWeightMap))); - - ctx.getRuleContexts(AlternateMultiMatchQueryContext.class) - .stream().findFirst().ifPresent( - arg -> - builder.add(new UnresolvedArgument("query", + builder.add(new UnresolvedArgument("fields", new RelevanceFieldList(fieldAndWeightMap))); + + ctx.getRuleContexts(AlternateMultiMatchQueryContext.class).stream() + .findFirst() + .ifPresent( + arg -> + builder.add( + new UnresolvedArgument( + "query", new Literal( - StringUtils.unquoteText(arg.argVal.getText()), DataType.STRING))) - ); + StringUtils.unquoteText(arg.argVal.getText()), DataType.STRING)))); fillRelevanceArgs(ctx.relevanceArg(), builder); @@ -674,18 +650,18 @@ private List altMultiFieldRelevanceFunctionArguments( ImmutableList.Builder builder = ImmutableList.builder(); var fields = new RelevanceFieldList(map); builder.add(new UnresolvedArgument("fields", fields)); - builder.add(new UnresolvedArgument("query", - new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); + builder.add( + new UnresolvedArgument( + "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); fillRelevanceArgs(ctx.relevanceArg(), builder); return builder.build(); } - private List getExtractFunctionArguments( - ExtractFunctionCallContext ctx) { - List args = Arrays.asList( - new Literal(ctx.extractFunction().datetimePart().getText(), DataType.STRING), - visitFunctionArg(ctx.extractFunction().functionArg()) - ); + private List getExtractFunctionArguments(ExtractFunctionCallContext ctx) { + List args = + Arrays.asList( + new Literal(ctx.extractFunction().datetimePart().getText(), DataType.STRING), + visitFunctionArg(ctx.extractFunction().functionArg())); return args; } } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java index f90ea2f991..94c11d05af 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.QualifiedNameContext; @@ -13,10 +12,9 @@ import org.opensearch.sql.sql.parser.context.QuerySpecification; /** - * AST Having filter builder that builds HAVING clause condition expressions - * and replace alias by original expression in SELECT clause. 
- * The reason for this is it's hard to replace afterwards since UnresolvedExpression - * is immutable. + * AST Having filter builder that builds HAVING clause condition expressions and replace alias by + * original expression in SELECT clause. The reason for this is it's hard to replace afterwards + * since UnresolvedExpression is immutable. */ @RequiredArgsConstructor public class AstHavingFilterBuilder extends AstExpressionBuilder { @@ -34,5 +32,4 @@ private UnresolvedExpression replaceAlias(UnresolvedExpression expr) { } return expr; } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java index 1b872dce54..2594709f4f 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.opensearch.sql.ast.dsl.AstDSL.booleanLiteral; @@ -27,8 +26,8 @@ import org.opensearch.sql.sql.parser.context.QuerySpecification; /** - * AST sort builder that builds Sort AST node from ORDER BY clause. During this process, the item - * in order by may be replaced by item in project list if it's an alias or ordinal. This is same as + * AST sort builder that builds Sort AST node from ORDER BY clause. During this process, the item in + * order by may be replaced by item in project list if it's an alias or ordinal. This is same as * GROUP BY building process. */ @RequiredArgsConstructor @@ -38,9 +37,7 @@ public class AstSortBuilder extends OpenSearchSQLParserBaseVisitor createSortFields() { @@ -57,8 +54,8 @@ private List createSortFields() { } /** - * Argument "asc" is required. - * Argument "nullFirst" is optional and determined by Analyzer later if absent. + * Argument "asc" is required. Argument "nullFirst" is optional and determined by Analyzer later + * if absent. */ private List createSortArguments(SortOption option) { SortOrder sortOrder = option.getSortOrder(); @@ -71,5 +68,4 @@ private List createSortArguments(SortOption option) { } return args.build(); } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java b/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java index 947dca51b9..3c60d43733 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.opensearch.sql.ast.tree.Sort.NullOrder; @@ -16,33 +15,24 @@ import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.TerminalNode; -/** - * Parser Utils Class. - */ +/** Parser Utils Class. */ @UtilityClass public class ParserUtils { - /** - * Get original text in query. - */ + /** Get original text in query. */ public static String getTextInQuery(ParserRuleContext ctx, String queryString) { Token start = ctx.getStart(); Token stop = ctx.getStop(); return queryString.substring(start.getStartIndex(), stop.getStopIndex() + 1); } - /** - * Create sort option from syntax tree node. - */ + /** Create sort option from syntax tree node. 
*/ public static SortOption createSortOption(OrderByElementContext orderBy) { return new SortOption( - createSortOrder(orderBy.order), - createNullOrder(orderBy.FIRST(), orderBy.LAST())); + createSortOrder(orderBy.order), createNullOrder(orderBy.FIRST(), orderBy.LAST())); } - /** - * Create sort order for sort option use from ASC/DESC token. - */ + /** Create sort order for sort option use from ASC/DESC token. */ public static SortOrder createSortOrder(Token ctx) { if (ctx == null) { return null; @@ -50,9 +40,7 @@ public static SortOrder createSortOrder(Token ctx) { return SortOrder.valueOf(ctx.getText().toUpperCase()); } - /** - * Create null order for sort option use from FIRST/LAST token. - */ + /** Create null order for sort option use from FIRST/LAST token. */ public static NullOrder createNullOrder(TerminalNode first, TerminalNode last) { if (first != null) { return NullOrder.NULL_FIRST; @@ -62,5 +50,4 @@ public static NullOrder createNullOrder(TerminalNode first, TerminalNode last) { return null; } } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java b/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java index 33b313367d..297fdfd749 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java @@ -3,21 +3,20 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser.context; import java.util.ArrayDeque; import java.util.Deque; /** - * SQL parsing context that maintains stack of query specifications for nested queries. - * Currently this is just a thin wrapper by a stack. + * SQL parsing context that maintains stack of query specifications for nested queries. Currently + * this is just a thin wrapper by a stack. */ public class ParsingContext { /** - * Use stack rather than linked query specification because there is no need - * to look up through the stack. + * Use stack rather than linked query specification because there is no need to look up through + * the stack. */ private final Deque contexts = new ArrayDeque<>(); @@ -31,10 +30,10 @@ public QuerySpecification peek() { /** * Pop up query context. - * @return query context after popup. + * + * @return query context after popup. */ public QuerySpecification pop() { return contexts.pop(); } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java b/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java index 21dddde2b9..5625371f05 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser.context; import static org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.FilteredAggregationFunctionCallContext; @@ -42,6 +41,7 @@ /** * Query specification domain that collects basic info for a simple query. + * *
  * (I) What is the impact of this new abstraction?
  *  This abstraction and collecting process turns AST building process into two phases:
@@ -61,10 +61,9 @@
 @ToString
 public class QuerySpecification {
 
-  /**
-   * Items in SELECT clause and mapping from alias to select item.
-   */
+  /** Items in SELECT clause and mapping from alias to select item. */
   private final List<UnresolvedExpression> selectItems = new ArrayList<>();
+
   private final Map<String, UnresolvedExpression> selectItemsByAlias = new HashMap<>();
 
   /**
@@ -74,31 +73,36 @@ public class QuerySpecification {
   private final Set<UnresolvedExpression> aggregators = new LinkedHashSet<>();
 
   /**
+   *
+   *
+   * <pre>
    * Items in GROUP BY clause that may be:
    *  1) Simple field name
    *  2) Field nested in scalar function call
    *  3) Ordinal that points to expression in SELECT
    *  4) Alias that points to expression in SELECT.
+   *  </pre>
    */
   private final List<UnresolvedExpression> groupByItems = new ArrayList<>();
 
-  /**
-   * Items in ORDER BY clause that may be different forms as above and its options.
-   */
+  /** Items in ORDER BY clause that may be different forms as above and its options. */
   private final List<UnresolvedExpression> orderByItems = new ArrayList<>();
+
   private final List<SortOption> orderByOptions = new ArrayList<>();
 
   /**
    * Collect all query information in the parse tree excluding info in sub-query).
-   * @param query query spec node in parse tree
+   *
+   * @param query query spec node in parse tree
    */
   public void collect(QuerySpecificationContext query, String queryString) {
     query.accept(new QuerySpecificationCollector(queryString));
   }
 
   /**
-   * Replace unresolved expression if it's an alias or ordinal that represents
-   * an actual expression in SELECT list.
+   * Replace unresolved expression if it's an alias or ordinal that represents an actual expression
+   * in SELECT list.
+   *
    * @param expr item to be replaced
    * @return select item that the given expr represents
    */
@@ -118,8 +122,8 @@ private boolean isIntegerLiteral(UnresolvedExpression expr) {
     }
 
     if (((Literal) expr).getType() != DataType.INTEGER) {
-      throw new SemanticCheckException(StringUtils.format(
-          "Non-integer constant [%s] found in ordinal", expr));
+      throw new SemanticCheckException(
+          StringUtils.format("Non-integer constant [%s] found in ordinal", expr));
     }
     return true;
   }
@@ -127,25 +131,26 @@ private boolean isIntegerLiteral(UnresolvedExpression expr) {
   private UnresolvedExpression getSelectItemByOrdinal(UnresolvedExpression expr) {
     int ordinal = (Integer) ((Literal) expr).getValue();
     if (ordinal <= 0 || ordinal > selectItems.size()) {
-      throw new SemanticCheckException(StringUtils.format(
-          "Ordinal [%d] is out of bound of select item list", ordinal));
+      throw new SemanticCheckException(
+          StringUtils.format("Ordinal [%d] is out of bound of select item list", ordinal));
     }
     return selectItems.get(ordinal - 1);
   }
 
   /**
    * Check if an expression is a select alias.
-   * @param expr expression
+   *
+   * @param expr expression
    * @return true if it's an alias
    */
   public boolean isSelectAlias(UnresolvedExpression expr) {
-    return (expr instanceof QualifiedName)
-        && (selectItemsByAlias.containsKey(expr.toString()));
+    return (expr instanceof QualifiedName) && (selectItemsByAlias.containsKey(expr.toString()));
   }
 
   /**
    * Get original expression aliased in SELECT clause.
-   * @param expr alias
+   *
+   * @param expr alias
    * @return expression in SELECT
    */
   public UnresolvedExpression getSelectItemByAlias(UnresolvedExpression expr) {
@@ -223,8 +228,7 @@ public Void visitAggregateFunctionCall(AggregateFunctionCallContext ctx) {
     @Override
     public Void visitFilteredAggregationFunctionCall(FilteredAggregationFunctionCallContext ctx) {
       UnresolvedExpression aggregateFunction = visitAstExpression(ctx);
-      aggregators.add(
-          AstDSL.alias(getTextInQuery(ctx, queryString), aggregateFunction));
+      aggregators.add(AstDSL.alias(getTextInQuery(ctx, queryString), aggregateFunction));
       return super.visitFilteredAggregationFunctionCall(ctx);
     }
 
@@ -236,5 +240,4 @@ private UnresolvedExpression visitAstExpression(ParseTree tree) {
       return expressionBuilder.visit(tree);
     }
   }
-
 }
diff --git a/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java b/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java
index 63d7666c62..87f2083774 100644
--- a/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java
+++ b/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java
@@ -7,16 +7,14 @@
 import lombok.Getter;
 import lombok.RequiredArgsConstructor;
 
-/**
- * A base class for tests for SQL or PPL parser.
- */
+/** A base class for tests for SQL or PPL parser. */
 @RequiredArgsConstructor(access = AccessLevel.PROTECTED)
 public abstract class SyntaxParserTestBase {
-  @Getter
-  private final Parser parser;
+  @Getter private final Parser parser;
 
   /**
    * A helper function that fails a test if the parser rejects a given query.
+   *
    * @param query Query to test.
    */
   protected void acceptQuery(String query) {
@@ -25,6 +23,7 @@ protected void acceptQuery(String query) {
 
   /**
    * A helper function that fails a test if the parser accepts a given query.
+   *
    * @param query Query to test.
*/ protected void rejectQuery(String query) { diff --git a/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java b/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java index f4342d877d..8cb2994dc3 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -45,14 +44,13 @@ class SQLServiceTest { private DefaultQueryManager queryManager; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; @BeforeEach public void setUp() { queryManager = DefaultQueryManager.defaultQueryManager(); - sqlService = new SQLService(new SQLSyntaxParser(), queryManager, - new QueryPlanFactory(queryService)); + sqlService = + new SQLService(new SQLSyntaxParser(), queryManager, new QueryPlanFactory(queryService)); } @AfterEach @@ -97,8 +95,8 @@ public void onFailure(Exception e) { @Test public void can_execute_close_cursor_query() { sqlService.execute( - new SQLQueryRequest(new JSONObject(), null, QUERY + "/close", - Map.of("format", "jdbc"), "n:cursor"), + new SQLQueryRequest( + new JSONObject(), null, QUERY + "/close", Map.of("format", "jdbc"), "n:cursor"), new ResponseListener<>() { @Override public void onResponse(QueryResponse response) { @@ -131,13 +129,17 @@ public void onFailure(Exception e) { @Test public void can_explain_sql_query() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new ExplainResponse(new ExplainResponseNode("Test"))); - return null; - }).when(queryService).explain(any(), any()); + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new ExplainResponse(new ExplainResponseNode("Test"))); + return null; + }) + .when(queryService) + .explain(any(), any()); - sqlService.explain(new SQLQueryRequest(new JSONObject(), "SELECT 123", EXPLAIN, "csv"), + sqlService.explain( + new SQLQueryRequest(new JSONObject(), "SELECT 123", EXPLAIN, "csv"), new ResponseListener() { @Override public void onResponse(ExplainResponse response) { @@ -153,8 +155,8 @@ public void onFailure(Exception e) { @Test public void cannot_explain_cursor_query() { - sqlService.explain(new SQLQueryRequest(new JSONObject(), null, EXPLAIN, - Map.of("format", "jdbc"), "n:cursor"), + sqlService.explain( + new SQLQueryRequest(new JSONObject(), null, EXPLAIN, Map.of("format", "jdbc"), "n:cursor"), new ResponseListener() { @Override public void onResponse(ExplainResponse response) { @@ -163,8 +165,10 @@ public void onResponse(ExplainResponse response) { @Override public void onFailure(Exception e) { - assertEquals("Explain of a paged query continuation is not supported." - + " Use `explain` for the initial query request.", e.getMessage()); + assertEquals( + "Explain of a paged query continuation is not supported." 
+ + " Use `explain` for the initial query request.", + e.getMessage()); } }); } diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java index 0f7a284aa7..120cd233fc 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import org.junit.jupiter.api.Test; diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java index 6826a37c0b..a0dbc2fc02 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java @@ -15,14 +15,14 @@ void single_field_test() { @Test void multiple_highlights_test() { - acceptQuery("SELECT HIGHLIGHT(Tags), HIGHLIGHT(Body) FROM Index " - + "WHERE MULTI_MATCH([Tags, Body], 'Time')"); + acceptQuery( + "SELECT HIGHLIGHT(Tags), HIGHLIGHT(Body) FROM Index " + + "WHERE MULTI_MATCH([Tags, Body], 'Time')"); } @Test void wildcard_test() { - acceptQuery("SELECT HIGHLIGHT('T*') FROM Index " - + "WHERE MULTI_MATCH([Tags, Body], 'Time')"); + acceptQuery("SELECT HIGHLIGHT('T*') FROM Index WHERE MULTI_MATCH([Tags, Body], 'Time')"); } @Test @@ -33,13 +33,12 @@ void highlight_all_test() { @Test void multiple_parameters_failure_test() { - rejectQuery("SELECT HIGHLIGHT(Tags1, Tags2) FROM Index " - + "WHERE MULTI_MATCH([Tags, Body], 'Time')"); + rejectQuery( + "SELECT HIGHLIGHT(Tags1, Tags2) FROM Index WHERE MULTI_MATCH([Tags, Body], 'Time')"); } @Test void no_parameters_failure_test() { - rejectQuery("SELECT HIGHLIGHT() FROM Index " - + "WHERE MULTI_MATCH([Tags, Body], 'Time')"); + rejectQuery("SELECT HIGHLIGHT() FROM Index WHERE MULTI_MATCH([Tags, Body], 'Time')"); } } diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java index 66c4d5be9d..db5ce18edb 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java @@ -25,14 +25,13 @@ static Stream generateValidArguments() { new String("max_expansions=50"), new String("fuzzy_transpositions=true"), new String("fuzzy_rewrite=constant_score"), - new String("boost=1") - ); + new String("boost=1")); } @ParameterizedTest @MethodSource("generateValidArguments") public void testValidArguments(String arg) { - acceptQuery("SELECT * FROM T WHERE MATCH_BOOL_PREFIX(message, 'query', " + arg + ")"); + acceptQuery("SELECT * FROM T WHERE MATCH_BOOL_PREFIX(message, 'query', " + arg + ")"); } @Test diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java index 3f323725ab..db091a4932 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import org.opensearch.sql.common.antlr.SyntaxParserTestBase; diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java index 
ade4983f58..f68c27deea 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import static org.junit.jupiter.api.Assertions.assertAll; @@ -73,8 +72,7 @@ public void canParseHiddenIndexName() { @Test public void canNotParseIndexNameWithSpecialChar() { - assertThrows(SyntaxCheckException.class, - () -> parser.parse("SELECT * FROM hello+world")); + assertThrows(SyntaxCheckException.class, () -> parser.parse("SELECT * FROM hello+world")); } @Test @@ -84,14 +82,12 @@ public void canParseIndexNameWithSpecialCharQuoted() { @Test public void canNotParseIndexNameStartingWithNumber() { - assertThrows(SyntaxCheckException.class, - () -> parser.parse("SELECT * FROM 123test")); + assertThrows(SyntaxCheckException.class, () -> parser.parse("SELECT * FROM 123test")); } @Test public void canNotParseIndexNameSingleQuoted() { - assertThrows(SyntaxCheckException.class, - () -> parser.parse("SELECT * FROM 'test'")); + assertThrows(SyntaxCheckException.class, () -> parser.parse("SELECT * FROM 'test'")); } @Test @@ -101,14 +97,15 @@ public void canParseWhereClause() { @Test public void canParseSelectClauseWithLogicalOperator() { - assertNotNull(parser.parse( - "SELECT age = 10 AND name = 'John' OR NOT (balance > 1000) FROM test")); + assertNotNull( + parser.parse("SELECT age = 10 AND name = 'John' OR NOT (balance > 1000) FROM test")); } @Test public void canParseWhereClauseWithLogicalOperator() { - assertNotNull(parser.parse("SELECT name FROM test " - + "WHERE age = 10 AND name = 'John' OR NOT (balance > 1000)")); + assertNotNull( + parser.parse( + "SELECT name FROM test WHERE age = 10 AND name = 'John' OR NOT (balance > 1000)")); } @Test @@ -128,9 +125,11 @@ public void canParseDistinctClause() { @Test public void canParseCaseStatement() { assertNotNull(parser.parse("SELECT CASE WHEN age > 30 THEN 'age1' ELSE 'age2' END FROM test")); - assertNotNull(parser.parse("SELECT CASE WHEN age > 30 THEN 'age1' " - + " WHEN age < 50 THEN 'age2' " - + " ELSE 'age3' END FROM test")); + assertNotNull( + parser.parse( + "SELECT CASE WHEN age > 30 THEN 'age1' " + + " WHEN age < 50 THEN 'age2' " + + " ELSE 'age3' END FROM test")); assertNotNull(parser.parse("SELECT CASE age WHEN 30 THEN 'age1' ELSE 'age2' END FROM test")); assertNotNull(parser.parse("SELECT CASE age WHEN 30 THEN 'age1' END FROM test")); } @@ -147,10 +146,11 @@ public void canNotParseAggregateFunctionWithWrongArgument() { public void canParseOrderByClause() { assertNotNull(parser.parse("SELECT name, age FROM test ORDER BY name, age")); assertNotNull(parser.parse("SELECT name, age FROM test ORDER BY name ASC, age DESC")); - assertNotNull(parser.parse( - "SELECT name, age FROM test ORDER BY name NULLS LAST, age NULLS FIRST")); - assertNotNull(parser.parse( - "SELECT name, age FROM test ORDER BY name ASC NULLS FIRST, age DESC NULLS LAST")); + assertNotNull( + parser.parse("SELECT name, age FROM test ORDER BY name NULLS LAST, age NULLS FIRST")); + assertNotNull( + parser.parse( + "SELECT name, age FROM test ORDER BY name ASC NULLS FIRST, age DESC NULLS LAST")); } @Test @@ -171,8 +171,7 @@ private static Stream nowLikeFunctionsData() { Arguments.of("current_date", false, true), Arguments.of("utc_date", false, true), Arguments.of("utc_time", false, true), - Arguments.of("utc_timestamp", false, true) - ); + Arguments.of("utc_timestamp", false, true)); } private 
static Stream getPartForExtractFunction() { @@ -196,8 +195,7 @@ private static Stream getPartForExtractFunction() { Arguments.of("DAY_SECOND"), Arguments.of("DAY_MINUTE"), Arguments.of("DAY_HOUR"), - Arguments.of("YEAR_MONTH") - ); + Arguments.of("YEAR_MONTH")); } @ParameterizedTest(name = "{0}") @@ -207,11 +205,7 @@ public void can_parse_extract_function(String part) { } private static Stream getInvalidPartForExtractFunction() { - return Stream.of( - Arguments.of("INVALID"), - Arguments.of("\"SECOND\""), - Arguments.of("123") - ); + return Stream.of(Arguments.of("INVALID"), Arguments.of("\"SECOND\""), Arguments.of("123")); } @ParameterizedTest(name = "{0}") @@ -231,9 +225,12 @@ public void can_parse_weekday_function() { @ParameterizedTest(name = "{0}") @MethodSource("nowLikeFunctionsData") public void can_parse_now_like_functions(String name, Boolean hasFsp, Boolean hasShortcut) { - var calls = new ArrayList() {{ - add(name + "()"); - }}; + var calls = + new ArrayList() { + { + add(name + "()"); + } + }; if (hasShortcut) { calls.add(name); } @@ -270,8 +267,7 @@ public void can_parse_get_format_function(String type, String format) { @Test public void cannot_parse_get_format_function_with_bad_arg() { assertThrows( - SyntaxCheckException.class, - () -> parser.parse("GET_FORMAT(NONSENSE_ARG,'INTERNAL')")); + SyntaxCheckException.class, () -> parser.parse("GET_FORMAT(NONSENSE_ARG,'INTERNAL')")); } @Test @@ -326,53 +322,55 @@ public void can_parse_month_of_year_function() { assertNotNull(parser.parse("SELECT month(timestamp('2022-11-18 00:00:00'))")); assertNotNull(parser.parse("SELECT month_of_year(timestamp('2022-11-18 00:00:00'))")); - } @Test public void can_parse_multi_match_relevance_function() { - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multimatch(\"fields\"=\"field\", query=\"query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multimatchquery(fields=\"field\", \"query\"=\"query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(\"fields\"=\"field\", \"query\"=\"query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(\'fields\'=\'field\', \'query\'=\'query\')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(fields=\'field\', query=\'query\')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address', 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match([\"*\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match([\"address\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match([`address`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match([address], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address', 'notes' ^ 1.5], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address', 'notes' 3], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " multi_match([\"Tags\" ^ 
1.5, Title, `Body` 4.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword," - + "operator='AND', tie_breaker=0.3, type = \"most_fields\", fuzziness = \"AUTO\")")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multimatch(\"fields\"=\"field\", query=\"query\")")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multimatchquery(fields=\"field\", \"query\"=\"query\")")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multi_match(\"fields\"=\"field\", \"query\"=\"query\")")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multi_match(\'fields\'=\'field\', \'query\'=\'query\')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE multi_match(fields=\'field\', query=\'query\')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match(['address'], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE multi_match(['address', 'notes'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([\"*\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([\"address\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([`address`], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([address], 'query')")); + + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE multi_match(['address', 'notes' ^ 1.5], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE multi_match(['address', 'notes' 3], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," + + " analyzer=keyword,operator='AND', tie_breaker=0.3, type = \"most_fields\"," + + " fuzziness = \"AUTO\")")); } @Test @@ -385,160 +383,137 @@ public void can_parse_second_functions() { @Test public void can_parse_simple_query_string_relevance_function() { - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address', 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string([\"*\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string([\"address\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string([`address`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string([address], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')")); - 
assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address', 'notes' 3], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword," - + "flags='AND', quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE simple_query_string(['address'], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string(['address', 'notes'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE simple_query_string([\"*\"], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE simple_query_string([\"address\"], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE simple_query_string([`address`], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE simple_query_string([address], 'query')")); + + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string(['address', 'notes' 3], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2]," + + " 'query', analyzer=keyword,flags='AND', quote_field_suffix=\".exact\"," + + " fuzzy_prefix_length = 4)")); } @Test public void can_parse_str_to_date() { - assertNotNull(parser.parse( - "SELECT STR_TO_DATE('01,5,2013','%d,%m,%Y')" - )); + assertNotNull(parser.parse("SELECT STR_TO_DATE('01,5,2013','%d,%m,%Y')")); - assertNotNull(parser.parse( - "SELECT STR_TO_DATE('a09:30:17','a%h:%i:%s')" - )); + assertNotNull(parser.parse("SELECT STR_TO_DATE('a09:30:17','a%h:%i:%s')")); - assertNotNull(parser.parse( - "SELECT STR_TO_DATE('abc','abc');" - )); + assertNotNull(parser.parse("SELECT STR_TO_DATE('abc','abc');")); } @Test public void can_parse_query_string_relevance_function() { - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['*'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['add*'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['*ess'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address', 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([\"*\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([\"address\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE 
query_string([\"ad*\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([\"*s\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([\"address\", \"notes\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([`*`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([`address`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([`ad*`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([`*ss`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([`address`, `notes`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([address], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([addr*], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([*ss], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([address, notes], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address' ^ 1.1, 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address', 'notes' ^ 1.5], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address', 'notes' 3], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword," - + "operator='AND', tie_breaker=0.3, type = \"most_fields\", fuzziness = 4)")); - } + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['*'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['address'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['add*'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['*ess'], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query_string(['address', 'notes'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"*\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"address\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"ad*\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"*s\"], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query_string([\"address\", \"notes\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`*`], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`address`], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`ad*`], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`*ss`], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query_string([`address`, `notes`], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE 
query_string([address], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([addr*], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([*ss], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query_string([address, notes], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE query_string(['address' ^ 1.1, 'notes'], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE query_string(['address', 'notes' ^ 1.5], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query_string(['address', 'notes' 3], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE query_string(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword," + + "operator='AND', tie_breaker=0.3, type = \"most_fields\", fuzziness = 4)")); + } @Test public void can_parse_query_relevance_function() { - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('address:query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('address:query OR notes:query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"address:query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"address:query OR notes:query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(`address:query`)")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(`address:query OR notes:query`)")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('*:query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"*:query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(`*:query`)")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('address:*uery OR notes:?uery')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"address:*uery OR notes:?uery\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(`address:*uery OR notes:?uery`)")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('address:qu*ry OR notes:qu?ry')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"address:qu*ry OR notes:qu?ry\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(`address:qu*ry OR notes:qu?ry`)")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('address:query notes:query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"address:query notes:query\")")); - assertNotNull(parser.parse( + assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:query OR notes:query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(\"address:query\")")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query(\"address:query OR notes:query\")")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:query`)")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:query OR notes:query`)")); + 
assertNotNull(parser.parse("SELECT id FROM test WHERE query('*:query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(\"*:query\")")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(`*:query`)")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:*uery OR notes:?uery')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query(\"address:*uery OR notes:?uery\")")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:*uery OR notes:?uery`)")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:qu*ry OR notes:qu?ry')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query(\"address:qu*ry OR notes:qu?ry\")")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:qu*ry OR notes:qu?ry`)")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:query notes:query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(\"address:query notes:query\")")); + assertNotNull( + parser.parse( "SELECT id FROM test WHERE " - + "query(\"Body:\'taste beer\' Tags:\'taste beer\' Title:\'taste beer\'\")")); + + "query(\"Body:\'taste beer\' Tags:\'taste beer\' Title:\'taste beer\'\")")); } - @Test public void can_parse_match_relevance_function() { assertNotNull(parser.parse("SELECT * FROM test WHERE match(column, \"this is a test\")")); @@ -552,19 +527,18 @@ public void can_parse_match_relevance_function() { public void can_parse_matchquery_relevance_function() { assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(column, \"this is a test\")")); assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(column, 'this is a test')")); - assertNotNull(parser.parse( - "SELECT * FROM test WHERE matchquery(`column`, \"this is a test\")")); + assertNotNull( + parser.parse("SELECT * FROM test WHERE matchquery(`column`, \"this is a test\")")); assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(`column`, 'this is a test')")); assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(column, 100500)")); } @Test public void can_parse_match_query_relevance_function() { - assertNotNull(parser.parse( - "SELECT * FROM test WHERE match_query(column, \"this is a test\")")); + assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(column, \"this is a test\")")); assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(column, 'this is a test')")); - assertNotNull(parser.parse( - "SELECT * FROM test WHERE match_query(`column`, \"this is a test\")")); + assertNotNull( + parser.parse("SELECT * FROM test WHERE match_query(`column`, \"this is a test\")")); assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(`column`, 'this is a test')")); assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(column, 100500)")); } @@ -572,21 +546,21 @@ public void can_parse_match_query_relevance_function() { @Test public void can_parse_match_phrase_relevance_function() { assertNotNull( - parser.parse("SELECT * FROM test WHERE match_phrase(column, \"this is a test\")")); + parser.parse("SELECT * FROM test WHERE match_phrase(column, \"this is a test\")")); assertNotNull(parser.parse("SELECT * FROM test WHERE match_phrase(column, 'this is a test')")); assertNotNull( - parser.parse("SELECT * FROM test WHERE match_phrase(`column`, \"this is a test\")")); + parser.parse("SELECT * FROM test WHERE match_phrase(`column`, \"this is a test\")")); assertNotNull( - parser.parse("SELECT * FROM test WHERE match_phrase(`column`, 
'this is a test')")); + parser.parse("SELECT * FROM test WHERE match_phrase(`column`, 'this is a test')")); assertNotNull(parser.parse("SELECT * FROM test WHERE match_phrase(column, 100500)")); } @Test public void can_parse_minute_of_day_function() { assertNotNull(parser.parse("SELECT minute_of_day(\"12:23:34\");")); - assertNotNull(parser.parse("SELECT minute_of_day('12:23:34');"));; - assertNotNull(parser.parse("SELECT minute_of_day(\"2022-12-14 12:23:34\");"));; - assertNotNull(parser.parse("SELECT minute_of_day('2022-12-14 12:23:34');"));; + assertNotNull(parser.parse("SELECT minute_of_day('12:23:34');")); + assertNotNull(parser.parse("SELECT minute_of_day(\"2022-12-14 12:23:34\");")); + assertNotNull(parser.parse("SELECT minute_of_day('2022-12-14 12:23:34');")); } @Test @@ -631,35 +605,20 @@ public void can_parse_wildcard_query_relevance_function() { assertNotNull( parser.parse("SELECT * FROM test WHERE wildcard_query(`column`, 'this is a test*')")); assertNotNull( - parser.parse("SELECT * FROM test WHERE wildcard_query(`column`, 'this is a test*', " - + "boost=1.5, case_insensitive=true, rewrite=\"scoring_boolean\")")); + parser.parse( + "SELECT * FROM test WHERE wildcard_query(`column`, 'this is a test*', " + + "boost=1.5, case_insensitive=true, rewrite=\"scoring_boolean\")")); } @Test public void can_parse_nested_function() { - assertNotNull( - parser.parse("SELECT NESTED(PATH.INNER_FIELD) FROM TEST")); - assertNotNull( - parser.parse("SELECT NESTED('PATH.INNER_FIELD') FROM TEST")); - assertNotNull( - parser.parse("SELECT SUM(NESTED(PATH.INNER_FIELD)) FROM TEST")); - assertNotNull( - parser.parse("SELECT NESTED(PATH.INNER_FIELD, PATH) FROM TEST")); - assertNotNull( - parser.parse( - "SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS) = 'A'" - ) - ); - assertNotNull( - parser.parse( - "SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS, PATH) = 'A'" - ) - ); - assertNotNull( - parser.parse( - "SELECT FIELD FROM TEST ORDER BY nested(PATH.INNER_FIELD, PATH)" - ) - ); + assertNotNull(parser.parse("SELECT NESTED(PATH.INNER_FIELD) FROM TEST")); + assertNotNull(parser.parse("SELECT NESTED('PATH.INNER_FIELD') FROM TEST")); + assertNotNull(parser.parse("SELECT SUM(NESTED(PATH.INNER_FIELD)) FROM TEST")); + assertNotNull(parser.parse("SELECT NESTED(PATH.INNER_FIELD, PATH) FROM TEST")); + assertNotNull(parser.parse("SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS) = 'A'")); + assertNotNull(parser.parse("SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS, PATH) = 'A'")); + assertNotNull(parser.parse("SELECT FIELD FROM TEST ORDER BY nested(PATH.INNER_FIELD, PATH)")); } @Test @@ -671,68 +630,69 @@ public void can_parse_yearweek_function() { @Test public void describe_request_accepts_only_quoted_string_literals() { assertAll( - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("DESCRIBE TABLES LIKE bank")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("DESCRIBE TABLES LIKE %bank%")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("DESCRIBE TABLES LIKE `bank`")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("DESCRIBE TABLES LIKE %bank% COLUMNS LIKE %status%")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("DESCRIBE TABLES LIKE 'bank' COLUMNS LIKE status")), + () -> + assertThrows( + SyntaxCheckException.class, () -> parser.parse("DESCRIBE TABLES LIKE bank")), + () -> + assertThrows( + SyntaxCheckException.class, () -> parser.parse("DESCRIBE TABLES LIKE %bank%")), + () -> + 
assertThrows( + SyntaxCheckException.class, () -> parser.parse("DESCRIBE TABLES LIKE `bank`")), + () -> + assertThrows( + SyntaxCheckException.class, + () -> parser.parse("DESCRIBE TABLES LIKE %bank% COLUMNS LIKE %status%")), + () -> + assertThrows( + SyntaxCheckException.class, + () -> parser.parse("DESCRIBE TABLES LIKE 'bank' COLUMNS LIKE status")), () -> assertNotNull(parser.parse("DESCRIBE TABLES LIKE 'bank' COLUMNS LIKE \"status\"")), - () -> assertNotNull(parser.parse("DESCRIBE TABLES LIKE \"bank\" COLUMNS LIKE 'status'")) - ); + () -> assertNotNull(parser.parse("DESCRIBE TABLES LIKE \"bank\" COLUMNS LIKE 'status'"))); } @Test public void show_request_accepts_only_quoted_string_literals() { assertAll( - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("SHOW TABLES LIKE bank")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("SHOW TABLES LIKE %bank%")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("SHOW TABLES LIKE `bank`")), + () -> assertThrows(SyntaxCheckException.class, () -> parser.parse("SHOW TABLES LIKE bank")), + () -> + assertThrows(SyntaxCheckException.class, () -> parser.parse("SHOW TABLES LIKE %bank%")), + () -> + assertThrows(SyntaxCheckException.class, () -> parser.parse("SHOW TABLES LIKE `bank`")), () -> assertNotNull(parser.parse("SHOW TABLES LIKE 'bank'")), - () -> assertNotNull(parser.parse("SHOW TABLES LIKE \"bank\"")) - ); + () -> assertNotNull(parser.parse("SHOW TABLES LIKE \"bank\""))); } @ParameterizedTest @MethodSource({ - "matchPhraseComplexQueries", - "matchPhraseGeneratedQueries", - "generateMatchPhraseQueries", - "matchPhraseQueryComplexQueries" + "matchPhraseComplexQueries", + "matchPhraseGeneratedQueries", + "generateMatchPhraseQueries", + "matchPhraseQueryComplexQueries" }) public void canParseComplexMatchPhraseArgsTest(String query) { assertNotNull(parser.parse(query)); } @ParameterizedTest - @MethodSource({ - "generateMatchPhrasePrefixQueries" - }) + @MethodSource({"generateMatchPhrasePrefixQueries"}) public void canParseComplexMatchPhrasePrefixQueries(String query) { assertNotNull(parser.parse(query)); } private static Stream matchPhraseComplexQueries() { return Stream.of( - "SELECT * FROM t WHERE match_phrase(c, 3)", - "SELECT * FROM t WHERE match_phrase(c, 3, fuzziness=AUTO)", - "SELECT * FROM t WHERE match_phrase(c, 3, zero_terms_query=\"all\")", - "SELECT * FROM t WHERE match_phrase(c, 3, lenient=true)", - "SELECT * FROM t WHERE match_phrase(c, 3, lenient='true')", - "SELECT * FROM t WHERE match_phrase(c, 3, operator=xor)", - "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04)", - "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04, analyzer = english, " - + "prefix_length=34, fuzziness='auto', minimum_should_match='2<-25% 9<-3')", - "SELECT * FROM t WHERE match_phrase(c, 3, minimum_should_match='2<-25% 9<-3')", - "SELECT * FROM t WHERE match_phrase(c, 3, operator='AUTO')" - ); + "SELECT * FROM t WHERE match_phrase(c, 3)", + "SELECT * FROM t WHERE match_phrase(c, 3, fuzziness=AUTO)", + "SELECT * FROM t WHERE match_phrase(c, 3, zero_terms_query=\"all\")", + "SELECT * FROM t WHERE match_phrase(c, 3, lenient=true)", + "SELECT * FROM t WHERE match_phrase(c, 3, lenient='true')", + "SELECT * FROM t WHERE match_phrase(c, 3, operator=xor)", + "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04)", + "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04, analyzer = english, " + + "prefix_length=34, fuzziness='auto', minimum_should_match='2<-25% 
9<-3')", + "SELECT * FROM t WHERE match_phrase(c, 3, minimum_should_match='2<-25% 9<-3')", + "SELECT * FROM t WHERE match_phrase(c, 3, operator='AUTO')"); } @Test @@ -771,50 +731,51 @@ private static Stream matchPhraseQueryComplexQueries() { "SELECT * FROM t WHERE matchphrasequery(c, 3, cutoff_frequency=0.04, analyzer = english, " + "prefix_length=34, fuzziness='auto', minimum_should_match='2<-25% 9<-3')", "SELECT * FROM t WHERE matchphrasequery(c, 3, minimum_should_match='2<-25% 9<-3')", - "SELECT * FROM t WHERE matchphrasequery(c, 3, operator='AUTO')" - ); + "SELECT * FROM t WHERE matchphrasequery(c, 3, operator='AUTO')"); } private static Stream matchPhraseGeneratedQueries() { var matchArgs = new HashMap(); - matchArgs.put("fuzziness", new String[]{ "AUTO", "AUTO:1,5", "1" }); - matchArgs.put("fuzzy_transpositions", new Boolean[]{ true, false }); - matchArgs.put("operator", new String[]{ "and", "or" }); - matchArgs.put("minimum_should_match", - new String[]{ "3", "-2", "75%", "-25%", "3<90%", "2<-25% 9<-3" }); - matchArgs.put("analyzer", new String[]{ "standard", "stop", "english" }); - matchArgs.put("zero_terms_query", new String[]{ "none", "all" }); - matchArgs.put("lenient", new Boolean[]{ true, false }); + matchArgs.put("fuzziness", new String[] {"AUTO", "AUTO:1,5", "1"}); + matchArgs.put("fuzzy_transpositions", new Boolean[] {true, false}); + matchArgs.put("operator", new String[] {"and", "or"}); + matchArgs.put( + "minimum_should_match", new String[] {"3", "-2", "75%", "-25%", "3<90%", "2<-25% 9<-3"}); + matchArgs.put("analyzer", new String[] {"standard", "stop", "english"}); + matchArgs.put("zero_terms_query", new String[] {"none", "all"}); + matchArgs.put("lenient", new Boolean[] {true, false}); // deprecated - matchArgs.put("cutoff_frequency", new Double[]{ .0, 0.001, 1., 42. 
}); - matchArgs.put("prefix_length", new Integer[]{ 0, 2, 5 }); - matchArgs.put("max_expansions", new Integer[]{ 0, 5, 20 }); - matchArgs.put("boost", new Double[]{ .5, 1., 2.3 }); + matchArgs.put("cutoff_frequency", new Double[] {.0, 0.001, 1., 42.}); + matchArgs.put("prefix_length", new Integer[] {0, 2, 5}); + matchArgs.put("max_expansions", new Integer[] {0, 5, 20}); + matchArgs.put("boost", new Double[] {.5, 1., 2.3}); return generateQueries("match", matchArgs); } private static Stream generateMatchPhraseQueries() { var matchPhraseArgs = new HashMap(); - matchPhraseArgs.put("analyzer", new String[]{ "standard", "stop", "english" }); - matchPhraseArgs.put("max_expansions", new Integer[]{ 0, 5, 20 }); - matchPhraseArgs.put("slop", new Integer[]{ 0, 1, 2 }); + matchPhraseArgs.put("analyzer", new String[] {"standard", "stop", "english"}); + matchPhraseArgs.put("max_expansions", new Integer[] {0, 5, 20}); + matchPhraseArgs.put("slop", new Integer[] {0, 1, 2}); return generateQueries("match_phrase", matchPhraseArgs); } private static Stream generateMatchPhrasePrefixQueries() { - return generateQueries("match_phrase_prefix", ImmutableMap.builder() - .put("analyzer", new String[] {"standard", "stop", "english"}) - .put("slop", new Integer[] {0, 1, 2}) - .put("max_expansions", new Integer[] {0, 3, 10}) - .put("zero_terms_query", new String[] {"NONE", "ALL", "NULL"}) - .put("boost", new Float[] {-0.5f, 1.0f, 1.2f}) - .build()); - } - - private static Stream generateQueries(String function, - Map functionArgs) { + return generateQueries( + "match_phrase_prefix", + ImmutableMap.builder() + .put("analyzer", new String[] {"standard", "stop", "english"}) + .put("slop", new Integer[] {0, 1, 2}) + .put("max_expansions", new Integer[] {0, 3, 10}) + .put("zero_terms_query", new String[] {"NONE", "ALL", "NULL"}) + .put("boost", new Float[] {-0.5f, 1.0f, 1.2f}) + .build()); + } + + private static Stream generateQueries( + String function, Map functionArgs) { var rand = new Random(0); class QueryGenerator implements Iterator { @@ -822,7 +783,7 @@ class QueryGenerator implements Iterator { private int currentQuery = 0; private String randomIdentifier() { - return RandomStringUtils.random(10, 0, 0,true, false, null, rand); + return RandomStringUtils.random(10, 0, 0, true, false, null, rand); } @Override @@ -836,16 +797,17 @@ public String next() { currentQuery += 1; StringBuilder query = new StringBuilder(); - query.append(String.format("SELECT * FROM test WHERE %s(%s, %s", function, - randomIdentifier(), - randomIdentifier())); + query.append( + String.format( + "SELECT * FROM test WHERE %s(%s, %s", + function, randomIdentifier(), randomIdentifier())); var args = new ArrayList(); for (var pair : functionArgs.entrySet()) { if (rand.nextBoolean()) { var arg = new StringBuilder(); arg.append(rand.nextBoolean() ? "," : ", "); - arg.append(rand.nextBoolean() ? pair.getKey().toLowerCase() - : pair.getKey().toUpperCase()); + arg.append( + rand.nextBoolean() ? pair.getKey().toLowerCase() : pair.getKey().toUpperCase()); arg.append(rand.nextBoolean() ? "=" : " = "); if (pair.getValue() instanceof String[] || rand.nextBoolean()) { var quoteSymbol = rand.nextBoolean() ? 
'\'' : '"'; diff --git a/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java b/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java index 1ffa4f0fa8..2b64b13b35 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.domain; import static org.junit.jupiter.api.Assertions.assertAll; @@ -32,21 +31,15 @@ public void should_support_query() { @Test public void should_support_query_with_JDBC_format() { - SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1") - .format("jdbc") - .build(); + SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1").format("jdbc").build(); assertAll( - () -> assertTrue(request.isSupported()), - () -> assertEquals(request.format(), Format.JDBC) - ); + () -> assertTrue(request.isSupported()), () -> assertEquals(request.format(), Format.JDBC)); } @Test public void should_support_query_with_query_field_only() { SQLQueryRequest request = - SQLQueryRequestBuilder.request("SELECT 1") - .jsonContent("{\"query\": \"SELECT 1\"}") - .build(); + SQLQueryRequestBuilder.request("SELECT 1").jsonContent("{\"query\": \"SELECT 1\"}").build(); assertTrue(request.isSupported()); } @@ -57,21 +50,16 @@ public void should_support_query_with_parameters() { .jsonContent("{\"query\": \"SELECT 1\", \"parameters\":[]}") .build(); SQLQueryRequest requestWithParams = - SQLQueryRequestBuilder.request("SELECT 1") - .params(Map.of("one", "two")) - .build(); + SQLQueryRequestBuilder.request("SELECT 1").params(Map.of("one", "two")).build(); assertAll( () -> assertTrue(requestWithContent.isSupported()), - () -> assertTrue(requestWithParams.isSupported()) - ); + () -> assertTrue(requestWithParams.isSupported())); } @Test public void should_support_query_without_parameters() { SQLQueryRequest requestWithNoParams = - SQLQueryRequestBuilder.request("SELECT 1") - .params(Map.of()) - .build(); + SQLQueryRequestBuilder.request("SELECT 1").params(Map.of()).build(); assertTrue(requestWithNoParams.isSupported()); } @@ -79,8 +67,8 @@ public void should_support_query_without_parameters() { public void should_support_query_with_zero_fetch_size() { SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1") - .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 0}") - .build(); + .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 0}") + .build(); assertTrue(request.isSupported()); } @@ -96,52 +84,37 @@ public void should_support_query_with_parameters_and_zero_fetch_size() { @Test public void should_support_explain() { SQLQueryRequest explainRequest = - SQLQueryRequestBuilder.request("SELECT 1") - .path("_plugins/_sql/_explain") - .build(); + SQLQueryRequestBuilder.request("SELECT 1").path("_plugins/_sql/_explain").build(); assertAll( () -> assertTrue(explainRequest.isExplainRequest()), - () -> assertTrue(explainRequest.isSupported()) - ); + () -> assertTrue(explainRequest.isSupported())); } @Test public void should_support_cursor_request() { SQLQueryRequest fetchSizeRequest = SQLQueryRequestBuilder.request("SELECT 1") - .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 5}") - .build(); + .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 5}") + .build(); SQLQueryRequest cursorRequest = - SQLQueryRequestBuilder.request(null) - .cursor("abcdefgh...") - .build(); + 
SQLQueryRequestBuilder.request(null).cursor("abcdefgh...").build(); assertAll( () -> assertTrue(fetchSizeRequest.isSupported()), - () -> assertTrue(cursorRequest.isSupported()) - ); + () -> assertTrue(cursorRequest.isSupported())); } @Test public void should_support_cursor_close_request() { SQLQueryRequest closeRequest = - SQLQueryRequestBuilder.request(null) - .cursor("pewpew") - .path("_plugins/_sql/close") - .build(); + SQLQueryRequestBuilder.request(null).cursor("pewpew").path("_plugins/_sql/close").build(); SQLQueryRequest emptyCloseRequest = - SQLQueryRequestBuilder.request(null) - .cursor("") - .path("_plugins/_sql/close") - .build(); + SQLQueryRequestBuilder.request(null).cursor("").path("_plugins/_sql/close").build(); - SQLQueryRequest pagingRequest = - SQLQueryRequestBuilder.request(null) - .cursor("pewpew") - .build(); + SQLQueryRequest pagingRequest = SQLQueryRequestBuilder.request(null).cursor("pewpew").build(); assertAll( () -> assertTrue(closeRequest.isSupported()), @@ -149,71 +122,52 @@ public void should_support_cursor_close_request() { () -> assertTrue(pagingRequest.isSupported()), () -> assertFalse(pagingRequest.isCursorCloseRequest()), () -> assertFalse(emptyCloseRequest.isSupported()), - () -> assertTrue(emptyCloseRequest.isCursorCloseRequest()) - ); + () -> assertTrue(emptyCloseRequest.isCursorCloseRequest())); } @Test public void should_not_support_request_with_empty_cursor() { SQLQueryRequest requestWithEmptyCursor = - SQLQueryRequestBuilder.request(null) - .cursor("") - .build(); + SQLQueryRequestBuilder.request(null).cursor("").build(); SQLQueryRequest requestWithNullCursor = - SQLQueryRequestBuilder.request(null) - .cursor(null) - .build(); + SQLQueryRequestBuilder.request(null).cursor(null).build(); assertAll( () -> assertFalse(requestWithEmptyCursor.isSupported()), - () -> assertFalse(requestWithNullCursor.isSupported()) - ); + () -> assertFalse(requestWithNullCursor.isSupported())); } @Test public void should_not_support_request_with_unknown_field() { SQLQueryRequest request = - SQLQueryRequestBuilder.request("SELECT 1") - .jsonContent("{\"pewpew\": 42}") - .build(); + SQLQueryRequestBuilder.request("SELECT 1").jsonContent("{\"pewpew\": 42}").build(); assertFalse(request.isSupported()); } @Test public void should_not_support_request_with_cursor_and_something_else() { SQLQueryRequest requestWithQuery = - SQLQueryRequestBuilder.request("SELECT 1") - .cursor("n:12356") - .build(); + SQLQueryRequestBuilder.request("SELECT 1").cursor("n:12356").build(); SQLQueryRequest requestWithParams = - SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .params(Map.of("one", "two")) - .build(); + SQLQueryRequestBuilder.request(null).cursor("n:12356").params(Map.of("one", "two")).build(); SQLQueryRequest requestWithParamsWithFormat = SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .params(Map.of("format", "jdbc")) - .build(); + .cursor("n:12356") + .params(Map.of("format", "jdbc")) + .build(); SQLQueryRequest requestWithParamsWithFormatAnd = SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .params(Map.of("format", "jdbc", "something", "else")) - .build(); + .cursor("n:12356") + .params(Map.of("format", "jdbc", "something", "else")) + .build(); SQLQueryRequest requestWithFetchSize = SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .jsonContent("{\"fetch_size\": 5}") - .build(); + .cursor("n:12356") + .jsonContent("{\"fetch_size\": 5}") + .build(); SQLQueryRequest requestWithNoParams = - SQLQueryRequestBuilder.request(null) - 
.cursor("n:12356") - .params(Map.of()) - .build(); + SQLQueryRequestBuilder.request(null).cursor("n:12356").params(Map.of()).build(); SQLQueryRequest requestWithNoContent = - SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .jsonContent("{}") - .build(); + SQLQueryRequestBuilder.request(null).cursor("n:12356").jsonContent("{}").build(); assertAll( () -> assertFalse(requestWithQuery.isSupported()), () -> assertFalse(requestWithParams.isSupported()), @@ -221,8 +175,7 @@ public void should_not_support_request_with_cursor_and_something_else() { () -> assertTrue(requestWithNoParams.isSupported()), () -> assertTrue(requestWithParamsWithFormat.isSupported()), () -> assertFalse(requestWithParamsWithFormatAnd.isSupported()), - () -> assertTrue(requestWithNoContent.isSupported()) - ); + () -> assertTrue(requestWithNoContent.isSupported())); } @Test @@ -234,15 +187,11 @@ public void should_use_JDBC_format_by_default() { @Test public void should_support_CSV_format_and_sanitize() { - SQLQueryRequest csvRequest = - SQLQueryRequestBuilder.request("SELECT 1") - .format("csv") - .build(); + SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").format("csv").build(); assertAll( () -> assertTrue(csvRequest.isSupported()), () -> assertEquals(csvRequest.format(), Format.CSV), - () -> assertTrue(csvRequest.sanitize()) - ); + () -> assertTrue(csvRequest.sanitize())); } @Test @@ -252,36 +201,28 @@ public void should_skip_sanitize_if_set_false() { SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").params(params).build(); assertAll( () -> assertEquals(csvRequest.format(), Format.CSV), - () -> assertFalse(csvRequest.sanitize()) - ); + () -> assertFalse(csvRequest.sanitize())); } @Test public void should_not_support_other_format() { - SQLQueryRequest csvRequest = - SQLQueryRequestBuilder.request("SELECT 1") - .format("other") - .build(); + SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").format("other").build(); assertAll( () -> assertFalse(csvRequest.isSupported()), - () -> assertEquals("response in other format is not supported.", - assertThrows(IllegalArgumentException.class, csvRequest::format).getMessage()) - ); + () -> + assertEquals( + "response in other format is not supported.", + assertThrows(IllegalArgumentException.class, csvRequest::format).getMessage())); } @Test public void should_support_raw_format() { - SQLQueryRequest csvRequest = - SQLQueryRequestBuilder.request("SELECT 1") - .format("raw") - .build(); + SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").format("raw").build(); assertTrue(csvRequest.isSupported()); } - /** - * SQL query request build helper to improve test data setup readability. - */ + /** SQL query request build helper to improve test data setup readability. */ private static class SQLQueryRequestBuilder { private String jsonContent; private String query; @@ -325,9 +266,8 @@ SQLQueryRequest build() { if (format != null) { params.put("format", format); } - return new SQLQueryRequest(jsonContent == null ? null : new JSONObject(jsonContent), - query, path, params, cursor); + return new SQLQueryRequest( + jsonContent == null ? 
null : new JSONObject(jsonContent), query, path, params, cursor); } } - } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java index 59d723e3a2..4d2addf3d3 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -23,6 +22,7 @@ public class AnonymizerListenerTest { /** * Helper function to parse SQl queries for testing purposes. + * * @param query SQL query to be anonymized. */ private void parse(String query) { @@ -36,8 +36,9 @@ private void parse(String query) { @Test public void queriesShouldHaveAnonymousFieldAndIndex() { String query = "SELECT ABS(balance) FROM accounts WHERE age > 30 GROUP BY ABS(balance)"; - String expectedQuery = "( SELECT ABS ( identifier ) FROM table " - + "WHERE identifier > number GROUP BY ABS ( identifier ) )"; + String expectedQuery = + "( SELECT ABS ( identifier ) FROM table " + + "WHERE identifier > number GROUP BY ABS ( identifier ) )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -92,12 +93,13 @@ public void queriesWithAggregatesShouldAnonymizeSensitiveData() { @Test public void queriesWithSubqueriesShouldAnonymizeSensitiveData() { - String query = "SELECT a.f, a.l, a.a FROM " - + "(SELECT firstname AS f, lastname AS l, age AS a FROM accounts WHERE age > 30) a"; + String query = + "SELECT a.f, a.l, a.a FROM " + + "(SELECT firstname AS f, lastname AS l, age AS a FROM accounts WHERE age > 30) a"; String expectedQuery = - "( SELECT identifier.identifier, identifier.identifier, identifier.identifier FROM " - + "( SELECT identifier AS identifier, identifier AS identifier, identifier AS identifier " - + "FROM table WHERE identifier > number ) identifier )"; + "( SELECT identifier.identifier, identifier.identifier, identifier.identifier FROM ( SELECT" + + " identifier AS identifier, identifier AS identifier, identifier AS identifier FROM" + + " table WHERE identifier > number ) identifier )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -121,8 +123,9 @@ public void queriesWithOrderByShouldAnonymizeSensitiveData() { @Test public void queriesWithHavingShouldAnonymizeSensitiveData() { String query = "SELECT SUM(balance) FROM accounts GROUP BY lastname HAVING COUNT(balance) > 2"; - String expectedQuery = "( SELECT SUM ( identifier ) FROM table " - + "GROUP BY identifier HAVING COUNT ( identifier ) > number )"; + String expectedQuery = + "( SELECT SUM ( identifier ) FROM table " + + "GROUP BY identifier HAVING COUNT ( identifier ) > number )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -130,8 +133,9 @@ public void queriesWithHavingShouldAnonymizeSensitiveData() { @Test public void queriesWithHighlightShouldAnonymizeSensitiveData() { String query = "SELECT HIGHLIGHT(str0) FROM CALCS WHERE QUERY_STRING(['str0'], 'FURNITURE')"; - String expectedQuery = "( SELECT HIGHLIGHT ( identifier ) FROM table WHERE " - + "QUERY_STRING ( [ 'string_literal' ], 'string_literal' ) )"; + String expectedQuery = + "( SELECT HIGHLIGHT ( identifier ) FROM table WHERE " + + "QUERY_STRING ( [ 'string_literal' ], 'string_literal' ) )"; parse(query); 
assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -139,8 +143,8 @@ public void queriesWithHighlightShouldAnonymizeSensitiveData() { @Test public void queriesWithMatchShouldAnonymizeSensitiveData() { String query = "SELECT str0 FROM CALCS WHERE MATCH(str0, 'FURNITURE')"; - String expectedQuery = "( SELECT identifier FROM table " - + "WHERE MATCH ( identifier, 'string_literal' ) )"; + String expectedQuery = + "( SELECT identifier FROM table " + "WHERE MATCH ( identifier, 'string_literal' ) )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -155,10 +159,12 @@ public void queriesWithPositionShouldAnonymizeSensitiveData() { @Test public void queriesWithMatch_Bool_Prefix_ShouldAnonymizeSensitiveData() { - String query = "SELECT firstname, address FROM accounts WHERE " - + "match_bool_prefix(address, 'Bristol Street', minimum_should_match=2)"; - String expectedQuery = "( SELECT identifier, identifier FROM table WHERE MATCH_BOOL_PREFIX " - + "( identifier, 'string_literal', MINIMUM_SHOULD_MATCH = number ) )"; + String query = + "SELECT firstname, address FROM accounts WHERE " + + "match_bool_prefix(address, 'Bristol Street', minimum_should_match=2)"; + String expectedQuery = + "( SELECT identifier, identifier FROM table WHERE MATCH_BOOL_PREFIX " + + "( identifier, 'string_literal', MINIMUM_SHOULD_MATCH = number ) )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -195,10 +201,7 @@ public void queriesWithNotEqualAlternateShouldAnonymizeSensitiveData() { assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } - - /** - * Test added for coverage, but the errorNode will not be hit normally. - */ + /** Test added for coverage, but the errorNode will not be hit normally. 
*/ @Test public void enterErrorNote() { ErrorNode node = mock(ErrorNode.class); diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java index fff789de44..95188e20b6 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static java.util.Collections.emptyList; @@ -59,10 +58,9 @@ void can_build_group_by_clause_with_scalar_expression() { buildAggregation("SELECT ABS(age + 1) FROM test GROUP BY ABS(age + 1)"), allOf( hasGroupByItems( - alias("ABS(+(age, 1))", function("ABS", - function("+", - qualifiedName("age"), - intLiteral(1))))), + alias( + "ABS(+(age, 1))", + function("ABS", function("+", qualifiedName("age"), intLiteral(1))))), hasAggregators())); } @@ -79,9 +77,7 @@ void can_build_group_by_clause_with_complicated_aggregators() { void can_build_group_by_clause_without_aggregators() { assertThat( buildAggregation("SELECT state FROM test GROUP BY state"), - allOf( - hasGroupByItems(alias("state", qualifiedName("state"))), - hasAggregators())); + allOf(hasGroupByItems(alias("state", qualifiedName("state"))), hasAggregators())); } @Test @@ -101,50 +97,43 @@ void can_build_implicit_group_by_for_aggregator_in_having_clause() { buildAggregation("SELECT true FROM test HAVING AVG(age) > 30"), allOf( hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( - buildAggregation("SELECT PI() FROM test HAVING AVG(age) > 30"), - allOf( - hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + buildAggregation("SELECT PI() FROM test HAVING AVG(age) > 30"), + allOf( + hasGroupByItems(), + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( - buildAggregation("SELECT ABS(1.5) FROM test HAVING AVG(age) > 30"), - allOf( - hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + buildAggregation("SELECT ABS(1.5) FROM test HAVING AVG(age) > 30"), + allOf( + hasGroupByItems(), + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( - buildAggregation("SELECT ABS(ABS(1.5)) FROM test HAVING AVG(age) > 30"), - allOf( - hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + buildAggregation("SELECT ABS(ABS(1.5)) FROM test HAVING AVG(age) > 30"), + allOf( + hasGroupByItems(), + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( buildAggregation("SELECT INTERVAL 1 DAY FROM test HAVING AVG(age) > 30"), allOf( hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( buildAggregation("SELECT CAST(1 AS LONG) FROM test HAVING AVG(age) > 30"), allOf( hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( buildAggregation("SELECT CASE WHEN true THEN 1 ELSE 2 END FROM test HAVING AVG(age) > 30"), allOf( hasGroupByItems(), - hasAggregators( - 
alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); } @Test @@ -154,8 +143,7 @@ void can_build_distinct_aggregator() { allOf( hasGroupByItems(alias("age", qualifiedName("age"))), hasAggregators( - alias("COUNT(DISTINCT name)", distinctAggregate("COUNT", qualifiedName( - "name")))))); + alias("COUNT(DISTINCT name)", distinctAggregate("COUNT", qualifiedName("name")))))); } @Test @@ -167,8 +155,8 @@ void should_build_nothing_if_no_group_by_and_no_aggregators_in_select() { void should_replace_group_by_alias_by_expression_in_select_clause() { assertThat( buildAggregation("SELECT state AS s, name FROM test GROUP BY s, name"), - hasGroupByItems(alias("state", qualifiedName("state")), - alias("name", qualifiedName("name")))); + hasGroupByItems( + alias("state", qualifiedName("state")), alias("name", qualifiedName("name")))); assertThat( buildAggregation("SELECT ABS(age) AS a FROM test GROUP BY a"), @@ -190,25 +178,30 @@ void should_replace_group_by_ordinal_by_expression_in_select_clause() { @Test void should_report_error_for_non_integer_ordinal_in_group_by() { - SemanticCheckException error = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT state AS s FROM test GROUP BY 1.5")); - assertEquals( - "Non-integer constant [1.5] found in ordinal", - error.getMessage()); + SemanticCheckException error = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT state AS s FROM test GROUP BY 1.5")); + assertEquals("Non-integer constant [1.5] found in ordinal", error.getMessage()); } - @Disabled("This validation is supposed to be in analyzing phase. This test should be enabled " + @Disabled( + "This validation is supposed to be in analyzing phase. 
This test should be enabled " + "once https://github.com/opensearch-project/sql/issues/910 has been resolved") @Test void should_report_error_for_mismatch_between_select_and_group_by_items() { - SemanticCheckException error1 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT name FROM test GROUP BY state")); + SemanticCheckException error1 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT name FROM test GROUP BY state")); assertEquals( "Expression [name] that contains non-aggregated column is not present in group by clause", error1.getMessage()); - SemanticCheckException error2 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT ABS(name + 1) FROM test GROUP BY name")); + SemanticCheckException error2 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT ABS(name + 1) FROM test GROUP BY name")); assertEquals( "Expression [Function(funcName=ABS, funcArgs=[Function(funcName=+, " + "funcArgs=[name, Literal(value=1, type=INTEGER)])])] that contains " @@ -218,15 +211,19 @@ void should_report_error_for_mismatch_between_select_and_group_by_items() { @Test void should_report_error_for_non_aggregated_item_in_select_if_no_group_by() { - SemanticCheckException error1 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT age, AVG(balance) FROM tests")); + SemanticCheckException error1 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT age, AVG(balance) FROM tests")); assertEquals( "Explicit GROUP BY clause is required because expression [age] " + "contains non-aggregated column", error1.getMessage()); - SemanticCheckException error2 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT ABS(age + 1), AVG(balance) FROM tests")); + SemanticCheckException error2 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT ABS(age + 1), AVG(balance) FROM tests")); assertEquals( "Explicit GROUP BY clause is required because expression [ABS(+(age, 1))] " + "contains non-aggregated column", @@ -235,19 +232,25 @@ void should_report_error_for_non_aggregated_item_in_select_if_no_group_by() { @Test void should_report_error_for_group_by_ordinal_out_of_bound_of_select_list() { - SemanticCheckException error1 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 0")); + SemanticCheckException error1 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 0")); assertEquals("Ordinal [0] is out of bound of select item list", error1.getMessage()); - SemanticCheckException error2 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 3")); + SemanticCheckException error2 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 3")); assertEquals("Ordinal [3] is out of bound of select item list", error2.getMessage()); } @Test void should_report_error_for_non_aggregated_item_in_select_if_only_having() { - SemanticCheckException error = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT age FROM tests HAVING AVG(balance) > 30")); + SemanticCheckException error = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT age FROM tests HAVING AVG(balance) > 30")); assertEquals( "Explicit GROUP BY clause is required 
because expression [age] " + "contains non-aggregated column", @@ -262,10 +265,10 @@ private Matcher hasAggregators(UnresolvedExpression... exprs) { return featureValueOf("aggregators", Aggregation::getAggExprList, exprs); } - private Matcher featureValueOf(String name, - Function> getter, - UnresolvedExpression... exprs) { + private Matcher featureValueOf( + String name, + Function> getter, + UnresolvedExpression... exprs) { Matcher> subMatcher = (exprs.length == 0) ? equalTo(emptyList()) : equalTo(Arrays.asList(exprs)); return new FeatureMatcher>(subMatcher, name, "") { @@ -295,5 +298,4 @@ private QuerySpecificationContext parse(String query) { parser.addErrorListener(new SyntaxAnalysisErrorListener()); return parser.querySpecification(); } - } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java index 3e56a89754..8ab314f695 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static java.util.Collections.emptyList; @@ -53,36 +52,20 @@ public void can_build_select_literals() { alias("'hello'", stringLiteral("hello")), alias("\"world\"", stringLiteral("world")), alias("false", booleanLiteral(false)), - alias("-4.567", doubleLiteral(-4.567)) - ), - buildAST("SELECT 123, 'hello', \"world\", false, -4.567") - ); + alias("-4.567", doubleLiteral(-4.567))), + buildAST("SELECT 123, 'hello', \"world\", false, -4.567")); } @Test public void can_build_select_function_call_with_alias() { assertEquals( - project( - relation("test"), - alias( - "ABS(age)", - function("ABS", qualifiedName("age")), - "a" - ) - ), - buildAST("SELECT ABS(age) AS a FROM test") - ); + project(relation("test"), alias("ABS(age)", function("ABS", qualifiedName("age")), "a")), + buildAST("SELECT ABS(age) AS a FROM test")); } @Test public void can_build_select_all_from_index() { - assertEquals( - project( - relation("test"), - AllFields.of() - ), - buildAST("SELECT * FROM test") - ); + assertEquals(project(relation("test"), AllFields.of()), buildAST("SELECT * FROM test")); assertThrows(SyntaxCheckException.class, () -> buildAST("SELECT *")); } @@ -90,14 +73,8 @@ public void can_build_select_all_from_index() { @Test public void can_build_nested_select_all() { assertEquals( - project( - relation("test"), - alias("nested(field.*)", - new NestedAllTupleFields("field") - ) - ), - buildAST("SELECT nested(field.*) FROM test") - ); + project(relation("test"), alias("nested(field.*)", new NestedAllTupleFields("field"))), + buildAST("SELECT nested(field.*) FROM test")); } @Test @@ -107,32 +84,22 @@ public void can_build_select_all_and_fields_from_index() { relation("test"), AllFields.of(), alias("age", qualifiedName("age")), - alias("age", qualifiedName("age"), "a") - ), - buildAST("SELECT *, age, age as a FROM test") - ); + alias("age", qualifiedName("age"), "a")), + buildAST("SELECT *, age, age as a FROM test")); } @Test public void can_build_select_fields_from_index() { assertEquals( - project( - relation("test"), - alias("age", qualifiedName("age")) - ), - buildAST("SELECT age FROM test") - ); + project(relation("test"), alias("age", qualifiedName("age"))), + buildAST("SELECT age FROM test")); } @Test public void can_build_select_fields_with_alias() { assertEquals( - project( - relation("test"), - alias("age", qualifiedName("age"), 
"a") - ), - buildAST("SELECT age AS a FROM test") - ); + project(relation("test"), alias("age", qualifiedName("age"), "a")), + buildAST("SELECT age AS a FROM test")); } @Test @@ -140,17 +107,8 @@ public void can_build_select_fields_with_alias_quoted() { assertEquals( project( relation("test"), - alias( - "(age + 10)", - function("+", qualifiedName("age"), intLiteral(10)), - "Age_Expr" - ) - ), - buildAST("SELECT" - + " (age + 10) AS `Age_Expr` " - + "FROM test" - ) - ); + alias("(age + 10)", function("+", qualifiedName("age"), intLiteral(10)), "Age_Expr")), + buildAST("SELECT (age + 10) AS `Age_Expr` FROM test")); } @Test @@ -158,42 +116,27 @@ public void can_build_from_index_with_alias() { assertEquals( project( filter( - relation("test", "tt"), - function("=", qualifiedName("tt", "age"), intLiteral(30))), - alias("tt.name", qualifiedName("tt", "name")) - ), - buildAST("SELECT tt.name FROM test AS tt WHERE tt.age = 30") - ); + relation("test", "tt"), function("=", qualifiedName("tt", "age"), intLiteral(30))), + alias("tt.name", qualifiedName("tt", "name"))), + buildAST("SELECT tt.name FROM test AS tt WHERE tt.age = 30")); } @Test public void can_build_from_index_with_alias_quoted() { assertEquals( project( - filter( - relation("test", "t"), - function("=", qualifiedName("t", "age"), intLiteral(30))), - alias("`t`.name", qualifiedName("t", "name")) - ), - buildAST("SELECT `t`.name FROM test `t` WHERE `t`.age = 30") - ); + filter(relation("test", "t"), function("=", qualifiedName("t", "age"), intLiteral(30))), + alias("`t`.name", qualifiedName("t", "name"))), + buildAST("SELECT `t`.name FROM test `t` WHERE `t`.age = 30")); } @Test public void can_build_where_clause() { assertEquals( project( - filter( - relation("test"), - function( - "=", - qualifiedName("name"), - stringLiteral("John")) - ), - alias("name", qualifiedName("name")) - ), - buildAST("SELECT name FROM test WHERE name = 'John'") - ); + filter(relation("test"), function("=", qualifiedName("name"), stringLiteral("John"))), + alias("name", qualifiedName("name"))), + buildAST("SELECT name FROM test WHERE name = 'John'")); } @Test @@ -202,8 +145,7 @@ public void can_build_count_literal() { project( agg( relation("test"), - ImmutableList.of( - alias("COUNT(1)", aggregate("COUNT", intLiteral(1)))), + ImmutableList.of(alias("COUNT(1)", aggregate("COUNT", intLiteral(1)))), emptyList(), emptyList(), emptyList()), @@ -217,8 +159,7 @@ public void can_build_count_star() { project( agg( relation("test"), - ImmutableList.of( - alias("COUNT(*)", aggregate("COUNT", AllFields.of()))), + ImmutableList.of(alias("COUNT(*)", aggregate("COUNT", AllFields.of()))), emptyList(), emptyList(), emptyList()), @@ -328,9 +269,7 @@ public void can_build_having_clause() { emptyList(), ImmutableList.of(alias("name", qualifiedName("name"))), emptyList()), - function(">", - aggregate("MIN", qualifiedName("balance")), - intLiteral(1000))), + function(">", aggregate("MIN", qualifiedName("balance")), intLiteral(1000))), alias("name", qualifiedName("name")), alias("AVG(age)", aggregate("AVG", qualifiedName("age")))), buildAST("SELECT name, AVG(age) FROM test GROUP BY name HAVING MIN(balance) > 1000")); @@ -343,14 +282,11 @@ public void can_build_having_condition_using_alias() { filter( agg( relation("test"), - ImmutableList.of( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))), + ImmutableList.of(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))), emptyList(), ImmutableList.of(alias("name", qualifiedName("name"))), emptyList()), - function(">", - 
aggregate("AVG", qualifiedName("age")), - intLiteral(1000))), + function(">", aggregate("AVG", qualifiedName("age")), intLiteral(1000))), alias("name", qualifiedName("name")), alias("AVG(age)", aggregate("AVG", qualifiedName("age")), "a")), buildAST("SELECT name, AVG(age) AS a FROM test GROUP BY name HAVING a > 1000")); @@ -360,9 +296,7 @@ public void can_build_having_condition_using_alias() { public void can_build_order_by_field_name() { assertEquals( project( - sort( - relation("test"), - field("name", argument("asc", booleanLiteral(true)))), + sort(relation("test"), field("name", argument("asc", booleanLiteral(true)))), alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test ORDER BY name")); } @@ -374,8 +308,7 @@ public void can_build_order_by_function() { sort( relation("test"), field( - function("ABS", qualifiedName("name")), - argument("asc", booleanLiteral(true)))), + function("ABS", qualifiedName("name")), argument("asc", booleanLiteral(true)))), alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test ORDER BY ABS(name)")); } @@ -384,9 +317,7 @@ public void can_build_order_by_function() { public void can_build_order_by_alias() { assertEquals( project( - sort( - relation("test"), - field("name", argument("asc", booleanLiteral(true)))), + sort(relation("test"), field("name", argument("asc", booleanLiteral(true)))), alias("name", qualifiedName("name"), "n")), buildAST("SELECT name AS n FROM test ORDER BY n ASC")); } @@ -395,9 +326,7 @@ public void can_build_order_by_alias() { public void can_build_order_by_ordinal() { assertEquals( project( - sort( - relation("test"), - field("name", argument("asc", booleanLiteral(false)))), + sort(relation("test"), field("name", argument("asc", booleanLiteral(false)))), alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test ORDER BY 1 DESC")); } @@ -424,8 +353,7 @@ public void can_build_select_distinct_clause() { emptyList(), emptyList(), ImmutableList.of( - alias("name", qualifiedName("name")), - alias("age", qualifiedName("age"))), + alias("name", qualifiedName("name")), alias("age", qualifiedName("age"))), emptyList()), alias("name", qualifiedName("name")), alias("age", qualifiedName("age"))), @@ -441,26 +369,21 @@ public void can_build_select_distinct_clause_with_function() { emptyList(), emptyList(), ImmutableList.of( - alias("SUBSTRING(name, 1, 2)", + alias( + "SUBSTRING(name, 1, 2)", function( - "SUBSTRING", - qualifiedName("name"), - intLiteral(1), intLiteral(2)))), + "SUBSTRING", qualifiedName("name"), intLiteral(1), intLiteral(2)))), emptyList()), - alias("SUBSTRING(name, 1, 2)", - function( - "SUBSTRING", - qualifiedName("name"), - intLiteral(1), intLiteral(2)))), + alias( + "SUBSTRING(name, 1, 2)", + function("SUBSTRING", qualifiedName("name"), intLiteral(1), intLiteral(2)))), buildAST("SELECT DISTINCT SUBSTRING(name, 1, 2) FROM test")); } @Test public void can_build_select_all_clause() { assertEquals( - buildAST("SELECT name, age FROM test"), - buildAST("SELECT ALL name, age FROM test") - ); + buildAST("SELECT name, age FROM test"), buildAST("SELECT ALL name, age FROM test")); } @Test @@ -469,22 +392,28 @@ public void can_build_order_by_null_option() { project( sort( relation("test"), - field("name", + field( + "name", argument("asc", booleanLiteral(true)), argument("nullFirst", booleanLiteral(false)))), - alias("name", qualifiedName("name"))), + alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test ORDER BY name NULLS LAST")); } /** + * + * + *
    * Ensure Nested function falls back to legacy engine when used in a HAVING clause.
    * TODO Remove this test when support is added.
+   * </pre>
*/ @Test public void nested_in_having_clause_throws_exception() { - SyntaxCheckException exception = assertThrows(SyntaxCheckException.class, - () -> buildAST("SELECT count(*) FROM test HAVING nested(message.info)") - ); + SyntaxCheckException exception = + assertThrows( + SyntaxCheckException.class, + () -> buildAST("SELECT count(*) FROM test HAVING nested(message.info)")); assertEquals( "Falling back to legacy engine. Nested function is not supported in the HAVING clause.", @@ -495,23 +424,15 @@ public void nested_in_having_clause_throws_exception() { public void can_build_order_by_sort_order_keyword_insensitive() { assertEquals( project( - sort( - relation("test"), - field("age", - argument("asc", booleanLiteral(true)))), + sort(relation("test"), field("age", argument("asc", booleanLiteral(true)))), alias("age", qualifiedName("age"))), - buildAST("SELECT age FROM test ORDER BY age ASC") - ); + buildAST("SELECT age FROM test ORDER BY age ASC")); assertEquals( project( - sort( - relation("test"), - field("age", - argument("asc", booleanLiteral(true)))), + sort(relation("test"), field("age", argument("asc", booleanLiteral(true)))), alias("age", qualifiedName("age"))), - buildAST("SELECT age FROM test ORDER BY age asc") - ); + buildAST("SELECT age FROM test ORDER BY age asc")); } @Test @@ -523,20 +444,15 @@ public void can_build_from_subquery() { project( relation("test"), alias("firstname", qualifiedName("firstname"), "firstName"), - alias("lastname", qualifiedName("lastname"), "lastName") - ), - "a" - ), - function(">", qualifiedName("age"), intLiteral(20)) - ), + alias("lastname", qualifiedName("lastname"), "lastName")), + "a"), + function(">", qualifiedName("age"), intLiteral(20))), alias("a.firstName", qualifiedName("a", "firstName")), alias("lastName", qualifiedName("lastName"))), buildAST( "SELECT a.firstName, lastName FROM (" + "SELECT firstname AS firstName, lastname AS lastName FROM test" - + ") AS a where age > 20" - ) - ); + + ") AS a where age > 20")); } @Test @@ -545,19 +461,15 @@ public void can_build_from_subquery_with_backquoted_alias() { project( relationSubquery( project( - relation("test"), - alias("firstname", qualifiedName("firstname"), "firstName")), + relation("test"), alias("firstname", qualifiedName("firstname"), "firstName")), "a"), - alias("a.firstName", qualifiedName("a", "firstName")) - ), + alias("a.firstName", qualifiedName("a", "firstName"))), buildAST( "SELECT a.firstName " + "FROM ( " + " SELECT `firstname` AS `firstName` " + " FROM `test` " - + ") AS `a`" - ) - ); + + ") AS `a`")); } @Test @@ -566,12 +478,9 @@ public void can_build_show_all_tables() { project( filter( relation(TABLE_INFO), - function("like", qualifiedName("TABLE_NAME"), stringLiteral("%")) - ), - AllFields.of() - ), - buildAST("SHOW TABLES LIKE '%'") - ); + function("like", qualifiedName("TABLE_NAME"), stringLiteral("%"))), + AllFields.of()), + buildAST("SHOW TABLES LIKE '%'")); } @Test @@ -580,12 +489,9 @@ public void can_build_show_selected_tables() { project( filter( relation(TABLE_INFO), - function("like", qualifiedName("TABLE_NAME"), stringLiteral("a_c%")) - ), - AllFields.of() - ), - buildAST("SHOW TABLES LIKE 'a_c%'") - ); + function("like", qualifiedName("TABLE_NAME"), stringLiteral("a_c%"))), + AllFields.of()), + buildAST("SHOW TABLES LIKE 'a_c%'")); } @Test @@ -594,23 +500,16 @@ public void show_compatible_with_old_engine_syntax() { project( filter( relation(TABLE_INFO), - function("like", qualifiedName("TABLE_NAME"), stringLiteral("%")) - ), - AllFields.of() - ), - 
buildAST("SHOW TABLES LIKE '%'") - ); + function("like", qualifiedName("TABLE_NAME"), stringLiteral("%"))), + AllFields.of()), + buildAST("SHOW TABLES LIKE '%'")); } @Test public void can_build_describe_selected_tables() { assertEquals( - project( - relation(mappingTable("a_c%")), - AllFields.of() - ), - buildAST("DESCRIBE TABLES LIKE 'a_c%'") - ); + project(relation(mappingTable("a_c%")), AllFields.of()), + buildAST("DESCRIBE TABLES LIKE 'a_c%'")); } @Test @@ -619,23 +518,16 @@ public void can_build_describe_selected_tables_field_filter() { project( filter( relation(mappingTable("a_c%")), - function("like", qualifiedName("COLUMN_NAME"), stringLiteral("name%")) - ), - AllFields.of() - ), - buildAST("DESCRIBE TABLES LIKE 'a_c%' COLUMNS LIKE 'name%'") - ); + function("like", qualifiedName("COLUMN_NAME"), stringLiteral("name%"))), + AllFields.of()), + buildAST("DESCRIBE TABLES LIKE 'a_c%' COLUMNS LIKE 'name%'")); } @Test public void can_build_alias_by_keywords() { assertEquals( - project( - relation("test"), - alias("avg_age", qualifiedName("avg_age"), "avg") - ), - buildAST("SELECT avg_age AS avg FROM test") - ); + project(relation("test"), alias("avg_age", qualifiedName("avg_age"), "avg")), + buildAST("SELECT avg_age AS avg FROM test")); } @Test @@ -643,42 +535,20 @@ public void can_build_limit_clause() { assertEquals( project( limit( - sort( - relation("test"), - field("age", argument("asc", booleanLiteral(true))) - ), - 10, - 0 - ), + sort(relation("test"), field("age", argument("asc", booleanLiteral(true)))), 10, 0), alias("name", qualifiedName("name")), - alias("age", qualifiedName("age")) - ), - buildAST("SELECT name, age FROM test ORDER BY age LIMIT 10") - ); + alias("age", qualifiedName("age"))), + buildAST("SELECT name, age FROM test ORDER BY age LIMIT 10")); } @Test public void can_build_limit_clause_with_offset() { assertEquals( - project( - limit( - relation("test"), - 10, - 5 - ), - alias("name", qualifiedName("name")) - ), + project(limit(relation("test"), 10, 5), alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test LIMIT 10 OFFSET 5")); assertEquals( - project( - limit( - relation("test"), - 10, - 5 - ), - alias("name", qualifiedName("name")) - ), + project(limit(relation("test"), 10, 5), alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test LIMIT 5, 10")); } @@ -686,11 +556,10 @@ public void can_build_limit_clause_with_offset() { public void can_build_qualified_name_highlight() { Map args = new HashMap<>(); assertEquals( - project(relation("test"), - alias("highlight(fieldA)", - highlight(AstDSL.qualifiedName("fieldA"), args))), - buildAST("SELECT highlight(fieldA) FROM test") - ); + project( + relation("test"), + alias("highlight(fieldA)", highlight(AstDSL.qualifiedName("fieldA"), args))), + buildAST("SELECT highlight(fieldA) FROM test")); } @Test @@ -699,22 +568,22 @@ public void can_build_qualified_highlight_with_arguments() { args.put("pre_tags", new Literal("", DataType.STRING)); args.put("post_tags", new Literal("", DataType.STRING)); assertEquals( - project(relation("test"), - alias("highlight(fieldA, pre_tags='', post_tags='')", + project( + relation("test"), + alias( + "highlight(fieldA, pre_tags='', post_tags='')", highlight(AstDSL.qualifiedName("fieldA"), args))), - buildAST("SELECT highlight(fieldA, pre_tags='', post_tags='') " - + "FROM test") - ); + buildAST( + "SELECT highlight(fieldA, pre_tags='', post_tags='') " + "FROM test")); } @Test public void can_build_string_literal_highlight() { Map args = new HashMap<>(); 
assertEquals( - project(relation("test"), - alias("highlight(\"fieldA\")", - highlight(AstDSL.stringLiteral("fieldA"), args))), - buildAST("SELECT highlight(\"fieldA\") FROM test") - ); + project( + relation("test"), + alias("highlight(\"fieldA\")", highlight(AstDSL.stringLiteral("fieldA"), args))), + buildAST("SELECT highlight(\"fieldA\") FROM test")); } } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java index 2161eb5b1a..602f17ce85 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java @@ -10,9 +10,7 @@ import org.opensearch.sql.sql.antlr.SQLSyntaxParser; public class AstBuilderTestBase { - /** - * SQL syntax parser that helps prepare parse tree as AstBuilder input. - */ + /** SQL syntax parser that helps prepare parse tree as AstBuilder input. */ private final SQLSyntaxParser parser = new SQLSyntaxParser(); protected UnresolvedPlan buildAST(String query) { diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java index 20655bc020..f2e7fdb2d8 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -57,185 +56,122 @@ class AstExpressionBuilderTest { @Test public void canBuildStringLiteral() { - assertEquals( - stringLiteral("hello"), - buildExprAst("'hello'") - ); - assertEquals( - stringLiteral("hello"), - buildExprAst("\"hello\"") - ); + assertEquals(stringLiteral("hello"), buildExprAst("'hello'")); + assertEquals(stringLiteral("hello"), buildExprAst("\"hello\"")); } @Test public void canBuildIntegerLiteral() { - assertEquals( - intLiteral(123), - buildExprAst("123") - ); - assertEquals( - intLiteral(Integer.MAX_VALUE), - buildExprAst(String.valueOf(Integer.MAX_VALUE)) - ); - assertEquals( - intLiteral(Integer.MIN_VALUE), - buildExprAst(String.valueOf(Integer.MIN_VALUE)) - ); + assertEquals(intLiteral(123), buildExprAst("123")); + assertEquals(intLiteral(Integer.MAX_VALUE), buildExprAst(String.valueOf(Integer.MAX_VALUE))); + assertEquals(intLiteral(Integer.MIN_VALUE), buildExprAst(String.valueOf(Integer.MIN_VALUE))); } @Test public void canBuildLongLiteral() { + assertEquals(longLiteral(1234567890123L), buildExprAst("1234567890123")); assertEquals( - longLiteral(1234567890123L), - buildExprAst("1234567890123") - ); + longLiteral(Integer.MAX_VALUE + 1L), buildExprAst(String.valueOf(Integer.MAX_VALUE + 1L))); assertEquals( - longLiteral(Integer.MAX_VALUE + 1L), - buildExprAst(String.valueOf(Integer.MAX_VALUE + 1L)) - ); - assertEquals( - longLiteral(Integer.MIN_VALUE - 1L), - buildExprAst(String.valueOf(Integer.MIN_VALUE - 1L)) - ); + longLiteral(Integer.MIN_VALUE - 1L), buildExprAst(String.valueOf(Integer.MIN_VALUE - 1L))); } @Test public void canBuildNegativeRealLiteral() { - assertEquals( - doubleLiteral(-4.567), - buildExprAst("-4.567") - ); + assertEquals(doubleLiteral(-4.567), buildExprAst("-4.567")); } @Test public void canBuildBooleanLiteral() { - assertEquals( - booleanLiteral(true), - buildExprAst("true") - ); + assertEquals(booleanLiteral(true), buildExprAst("true")); } @Test 
public void canBuildDateLiteral() { - assertEquals( - dateLiteral("2020-07-07"), - buildExprAst("DATE '2020-07-07'") - ); + assertEquals(dateLiteral("2020-07-07"), buildExprAst("DATE '2020-07-07'")); } @Test public void canBuildTimeLiteral() { - assertEquals( - timeLiteral("11:30:45"), - buildExprAst("TIME '11:30:45'") - ); + assertEquals(timeLiteral("11:30:45"), buildExprAst("TIME '11:30:45'")); } @Test public void canBuildTimestampLiteral() { assertEquals( - timestampLiteral("2020-07-07 11:30:45"), - buildExprAst("TIMESTAMP '2020-07-07 11:30:45'") - ); + timestampLiteral("2020-07-07 11:30:45"), buildExprAst("TIMESTAMP '2020-07-07 11:30:45'")); } @Test public void canBuildIntervalLiteral() { - assertEquals( - intervalLiteral(1, DataType.INTEGER, "day"), - buildExprAst("interval 1 day") - ); + assertEquals(intervalLiteral(1, DataType.INTEGER, "day"), buildExprAst("interval 1 day")); } @Test public void canBuildArithmeticExpression() { - assertEquals( - function("+", intLiteral(1), intLiteral(2)), - buildExprAst("1 + 2") - ); + assertEquals(function("+", intLiteral(1), intLiteral(2)), buildExprAst("1 + 2")); } @Test public void canBuildArithmeticExpressionPrecedence() { assertEquals( - function("+", - intLiteral(1), - function("*", - intLiteral(2), intLiteral(3))), - buildExprAst("1 + 2 * 3") - ); + function("+", intLiteral(1), function("*", intLiteral(2), intLiteral(3))), + buildExprAst("1 + 2 * 3")); } @Test public void canBuildFunctionWithoutArguments() { - assertEquals( - function("PI"), - buildExprAst("PI()") - ); + assertEquals(function("PI"), buildExprAst("PI()")); } @Test public void canBuildExpressionWithParentheses() { assertEquals( - function("*", + function( + "*", function("+", doubleLiteral(-1.0), doubleLiteral(2.3)), - function("-", intLiteral(3), intLiteral(1)) - ), - buildExprAst("(-1.0 + 2.3) * (3 - 1)") - ); + function("-", intLiteral(3), intLiteral(1))), + buildExprAst("(-1.0 + 2.3) * (3 - 1)")); } @Test public void canBuildFunctionCall() { - assertEquals( - function("abs", intLiteral(-1)), - buildExprAst("abs(-1)") - ); + assertEquals(function("abs", intLiteral(-1)), buildExprAst("abs(-1)")); } @Test public void canBuildExtractFunctionCall() { assertEquals( function("extract", stringLiteral("DAY"), dateLiteral("2023-02-09")).toString(), - buildExprAst("extract(DAY FROM \"2023-02-09\")").toString() - ); + buildExprAst("extract(DAY FROM \"2023-02-09\")").toString()); } @Test public void canBuildGetFormatFunctionCall() { assertEquals( function("get_format", stringLiteral("DATE"), stringLiteral("USA")), - buildExprAst("get_format(DATE,\"USA\")") - ); + buildExprAst("get_format(DATE,\"USA\")")); } @Test public void canBuildNestedFunctionCall() { assertEquals( - function("abs", - function("*", - function("abs", intLiteral(-5)), - intLiteral(-1) - ) - ), - buildExprAst("abs(abs(-5) * -1)") - ); + function("abs", function("*", function("abs", intLiteral(-5)), intLiteral(-1))), + buildExprAst("abs(abs(-5) * -1)")); } @Test public void canBuildDateAndTimeFunctionCall() { assertEquals( function("dayofmonth", dateLiteral("2020-07-07")), - buildExprAst("dayofmonth(DATE '2020-07-07')") - ); + buildExprAst("dayofmonth(DATE '2020-07-07')")); } @Test public void canBuildTimestampAddFunctionCall() { assertEquals( function("timestampadd", stringLiteral("WEEK"), intLiteral(1), dateLiteral("2023-03-14")), - buildExprAst("timestampadd(WEEK, 1, DATE '2023-03-14')") - ); + buildExprAst("timestampadd(WEEK, 1, DATE '2023-03-14')")); } @Test @@ -246,105 +182,69 @@ public void 
canBuildTimstampDiffFunctionCall() { stringLiteral("WEEK"), timestampLiteral("2023-03-15 00:00:01"), dateLiteral("2023-03-14")), - buildExprAst("timestampdiff(WEEK, TIMESTAMP '2023-03-15 00:00:01', DATE '2023-03-14')") - ); + buildExprAst("timestampdiff(WEEK, TIMESTAMP '2023-03-15 00:00:01', DATE '2023-03-14')")); } @Test public void canBuildComparisonExpression() { - assertEquals( - function("!=", intLiteral(1), intLiteral(2)), - buildExprAst("1 != 2") - ); + assertEquals(function("!=", intLiteral(1), intLiteral(2)), buildExprAst("1 != 2")); - assertEquals( - function("!=", intLiteral(1), intLiteral(2)), - buildExprAst("1 <> 2") - ); + assertEquals(function("!=", intLiteral(1), intLiteral(2)), buildExprAst("1 <> 2")); } @Test public void canBuildNullTestExpression() { - assertEquals( - function("is null", intLiteral(1)), - buildExprAst("1 is NULL") - ); + assertEquals(function("is null", intLiteral(1)), buildExprAst("1 is NULL")); - assertEquals( - function("is not null", intLiteral(1)), - buildExprAst("1 IS NOT null") - ); + assertEquals(function("is not null", intLiteral(1)), buildExprAst("1 IS NOT null")); } @Test public void canBuildNullTestExpressionWithNULLLiteral() { - assertEquals( - function("is null", nullLiteral()), - buildExprAst("NULL is NULL") - ); + assertEquals(function("is null", nullLiteral()), buildExprAst("NULL is NULL")); - assertEquals( - function("is not null", nullLiteral()), - buildExprAst("NULL IS NOT null") - ); + assertEquals(function("is not null", nullLiteral()), buildExprAst("NULL IS NOT null")); } @Test public void canBuildLikeExpression() { assertEquals( function("like", stringLiteral("str"), stringLiteral("st%")), - buildExprAst("'str' like 'st%'") - ); + buildExprAst("'str' like 'st%'")); assertEquals( function("not like", stringLiteral("str"), stringLiteral("st%")), - buildExprAst("'str' not like 'st%'") - ); + buildExprAst("'str' not like 'st%'")); } @Test public void canBuildRegexpExpression() { assertEquals( function("regexp", stringLiteral("str"), stringLiteral(".*")), - buildExprAst("'str' regexp '.*'") - ); + buildExprAst("'str' regexp '.*'")); } @Test public void canBuildBetweenExpression() { assertEquals( - between( - qualifiedName("age"), intLiteral(10), intLiteral(30)), - buildExprAst("age BETWEEN 10 AND 30") - ); + between(qualifiedName("age"), intLiteral(10), intLiteral(30)), + buildExprAst("age BETWEEN 10 AND 30")); } @Test public void canBuildNotBetweenExpression() { assertEquals( - not( - between( - qualifiedName("age"), intLiteral(10), intLiteral(30))), - buildExprAst("age NOT BETWEEN 10 AND 30") - ); + not(between(qualifiedName("age"), intLiteral(10), intLiteral(30))), + buildExprAst("age NOT BETWEEN 10 AND 30")); } @Test public void canBuildLogicalExpression() { - assertEquals( - and(booleanLiteral(true), booleanLiteral(false)), - buildExprAst("true AND false") - ); + assertEquals(and(booleanLiteral(true), booleanLiteral(false)), buildExprAst("true AND false")); - assertEquals( - or(booleanLiteral(true), booleanLiteral(false)), - buildExprAst("true OR false") - ); + assertEquals(or(booleanLiteral(true), booleanLiteral(false)), buildExprAst("true OR false")); - assertEquals( - not(booleanLiteral(false)), - buildExprAst("NOT false") - ); + assertEquals(not(booleanLiteral(false)), buildExprAst("NOT false")); } @Test @@ -373,8 +273,8 @@ public void canBuildWindowFunctionWithNullOrderSpecified() { window( function("DENSE_RANK"), ImmutableList.of(), - ImmutableList.of(ImmutablePair.of( - new SortOption(ASC, NULL_LAST), 
qualifiedName("age")))), + ImmutableList.of( + ImmutablePair.of(new SortOption(ASC, NULL_LAST), qualifiedName("age")))), buildExprAst("DENSE_RANK() OVER (ORDER BY age ASC NULLS LAST)")); } @@ -382,35 +282,27 @@ public void canBuildWindowFunctionWithNullOrderSpecified() { public void canBuildStringLiteralHighlightFunction() { HashMap args = new HashMap<>(); assertEquals( - highlight(AstDSL.stringLiteral("fieldA"), args), - buildExprAst("highlight(\"fieldA\")") - ); + highlight(AstDSL.stringLiteral("fieldA"), args), buildExprAst("highlight(\"fieldA\")")); } @Test public void canBuildQualifiedNameHighlightFunction() { HashMap args = new HashMap<>(); assertEquals( - highlight(AstDSL.qualifiedName("fieldA"), args), - buildExprAst("highlight(fieldA)") - ); + highlight(AstDSL.qualifiedName("fieldA"), args), buildExprAst("highlight(fieldA)")); } @Test public void canBuildStringLiteralPositionFunction() { assertEquals( - function("position", stringLiteral("substr"), stringLiteral("str")), - buildExprAst("position(\"substr\" IN \"str\")") - ); + function("position", stringLiteral("substr"), stringLiteral("str")), + buildExprAst("position(\"substr\" IN \"str\")")); } @Test public void canBuildWindowFunctionWithoutOrderBy() { assertEquals( - window( - function("RANK"), - ImmutableList.of(qualifiedName("state")), - ImmutableList.of()), + window(function("RANK"), ImmutableList.of(qualifiedName("state")), ImmutableList.of()), buildExprAst("RANK() OVER (PARTITION BY state)")); } @@ -420,8 +312,7 @@ public void canBuildAggregateWindowFunction() { window( aggregate("AVG", qualifiedName("age")), ImmutableList.of(qualifiedName("state")), - ImmutableList.of(ImmutablePair.of( - new SortOption(null, null), qualifiedName("age")))), + ImmutableList.of(ImmutablePair.of(new SortOption(null, null), qualifiedName("age")))), buildExprAst("AVG(age) OVER (PARTITION BY state ORDER BY age)")); } @@ -430,11 +321,8 @@ public void canBuildCaseConditionStatement() { assertEquals( caseWhen( null, // no else statement - when( - function(">", qualifiedName("age"), intLiteral(30)), - stringLiteral("age1"))), - buildExprAst("CASE WHEN age > 30 THEN 'age1' END") - ); + when(function(">", qualifiedName("age"), intLiteral(30)), stringLiteral("age1"))), + buildExprAst("CASE WHEN age > 30 THEN 'age1' END")); } @Test @@ -444,168 +332,147 @@ public void canBuildCaseValueStatement() { qualifiedName("age"), stringLiteral("age2"), when(intLiteral(30), stringLiteral("age1"))), - buildExprAst("CASE age WHEN 30 THEN 'age1' ELSE 'age2' END") - ); + buildExprAst("CASE age WHEN 30 THEN 'age1' ELSE 'age2' END")); } @Test public void canBuildKeywordsAsIdentifiers() { - assertEquals( - qualifiedName("timestamp"), - buildExprAst("timestamp") - ); + assertEquals(qualifiedName("timestamp"), buildExprAst("timestamp")); } @Test public void canBuildKeywordsAsIdentInQualifiedName() { - assertEquals( - qualifiedName("test", "timestamp"), - buildExprAst("test.timestamp") - ); + assertEquals(qualifiedName("test", "timestamp"), buildExprAst("test.timestamp")); } @Test public void canBuildMetaDataFieldAsQualifiedName() { - Stream.of("_id", "_index", "_sort", "_score", "_maxscore").forEach( - field -> assertEquals( - qualifiedName(field), - buildExprAst(field) - ) - ); + Stream.of("_id", "_index", "_sort", "_score", "_maxscore") + .forEach(field -> assertEquals(qualifiedName(field), buildExprAst(field))); } @Test public void canBuildNonMetaDataFieldAsQualifiedName() { - Stream.of("id", "__id", "_routing", "___field").forEach( - field -> assertEquals( - 
qualifiedName(field), - buildExprAst(field) - ) - ); + Stream.of("id", "__id", "_routing", "___field") + .forEach(field -> assertEquals(qualifiedName(field), buildExprAst(field))); } @Test public void canCastFieldAsString() { assertEquals( AstDSL.cast(qualifiedName("state"), stringLiteral("string")), - buildExprAst("cast(state as string)") - ); + buildExprAst("cast(state as string)")); } @Test public void canCastValueAsString() { assertEquals( - AstDSL.cast(intLiteral(1), stringLiteral("string")), - buildExprAst("cast(1 as string)") - ); + AstDSL.cast(intLiteral(1), stringLiteral("string")), buildExprAst("cast(1 as string)")); } @Test public void filteredAggregation() { assertEquals( - AstDSL.filteredAggregate("avg", qualifiedName("age"), - function(">", qualifiedName("age"), intLiteral(20))), - buildExprAst("avg(age) filter(where age > 20)") - ); + AstDSL.filteredAggregate( + "avg", qualifiedName("age"), function(">", qualifiedName("age"), intLiteral(20))), + buildExprAst("avg(age) filter(where age > 20)")); } @Test public void canBuildVarSamp() { - assertEquals( - aggregate("var_samp", qualifiedName("age")), - buildExprAst("var_samp(age)")); + assertEquals(aggregate("var_samp", qualifiedName("age")), buildExprAst("var_samp(age)")); } @Test public void canBuildVarPop() { - assertEquals( - aggregate("var_pop", qualifiedName("age")), - buildExprAst("var_pop(age)")); + assertEquals(aggregate("var_pop", qualifiedName("age")), buildExprAst("var_pop(age)")); } @Test public void canBuildVariance() { - assertEquals( - aggregate("variance", qualifiedName("age")), - buildExprAst("variance(age)")); + assertEquals(aggregate("variance", qualifiedName("age")), buildExprAst("variance(age)")); } @Test public void distinctCount() { assertEquals( AstDSL.distinctAggregate("count", qualifiedName("name")), - buildExprAst("count(distinct name)") - ); + buildExprAst("count(distinct name)")); } @Test public void filteredDistinctCount() { assertEquals( - AstDSL.filteredDistinctCount("count", qualifiedName("name"), function( - ">", qualifiedName("age"), intLiteral(30))), - buildExprAst("count(distinct name) filter(where age > 30)") - ); + AstDSL.filteredDistinctCount( + "count", qualifiedName("name"), function(">", qualifiedName("age"), intLiteral(30))), + buildExprAst("count(distinct name) filter(where age > 30)")); } @Test public void matchPhraseQueryAllParameters() { assertEquals( - AstDSL.function("matchphrasequery", + AstDSL.function( + "matchphrasequery", unresolvedArg("field", qualifiedName("test")), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("slop", stringLiteral("3")), unresolvedArg("analyzer", stringLiteral("standard")), - unresolvedArg("zero_terms_query", stringLiteral("NONE")) - ), - buildExprAst("matchphrasequery(test, 'search query', slop = 3" - + ", analyzer = 'standard', zero_terms_query='NONE'" - + ")") - ); + unresolvedArg("zero_terms_query", stringLiteral("NONE"))), + buildExprAst( + "matchphrasequery(test, 'search query', slop = 3" + + ", analyzer = 'standard', zero_terms_query='NONE'" + + ")")); } @Test public void matchPhrasePrefixAllParameters() { assertEquals( - AstDSL.function("match_phrase_prefix", - unresolvedArg("field", qualifiedName("test")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("slop", stringLiteral("3")), - unresolvedArg("boost", stringLiteral("1.5")), - unresolvedArg("analyzer", stringLiteral("standard")), - unresolvedArg("max_expansions", stringLiteral("4")), - unresolvedArg("zero_terms_query", stringLiteral("NONE")) - 
), - buildExprAst("match_phrase_prefix(test, 'search query', slop = 3, boost = 1.5" - + ", analyzer = 'standard', max_expansions = 4, zero_terms_query='NONE'" - + ")") - ); + AstDSL.function( + "match_phrase_prefix", + unresolvedArg("field", qualifiedName("test")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("slop", stringLiteral("3")), + unresolvedArg("boost", stringLiteral("1.5")), + unresolvedArg("analyzer", stringLiteral("standard")), + unresolvedArg("max_expansions", stringLiteral("4")), + unresolvedArg("zero_terms_query", stringLiteral("NONE"))), + buildExprAst( + "match_phrase_prefix(test, 'search query', slop = 3, boost = 1.5" + + ", analyzer = 'standard', max_expansions = 4, zero_terms_query='NONE'" + + ")")); } @Test public void relevanceMatch() { - assertEquals(AstDSL.function("match", - unresolvedArg("field", qualifiedName("message")), - unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("match('message', 'search query')") - ); - - assertEquals(AstDSL.function("match", - unresolvedArg("field", qualifiedName("message")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("analyzer", stringLiteral("keyword")), - unresolvedArg("operator", stringLiteral("AND"))), + assertEquals( + AstDSL.function( + "match", + unresolvedArg("field", qualifiedName("message")), + unresolvedArg("query", stringLiteral("search query"))), + buildExprAst("match('message', 'search query')")); + + assertEquals( + AstDSL.function( + "match", + unresolvedArg("field", qualifiedName("message")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("analyzer", stringLiteral("keyword")), + unresolvedArg("operator", stringLiteral("AND"))), buildExprAst("match('message', 'search query', analyzer='keyword', operator='AND')")); } @Test public void relevanceMatchQuery() { - assertEquals(AstDSL.function("matchquery", + assertEquals( + AstDSL.function( + "matchquery", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("matchquery('message', 'search query')") - ); + buildExprAst("matchquery('message', 'search query')")); - assertEquals(AstDSL.function("matchquery", + assertEquals( + AstDSL.function( + "matchquery", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), @@ -615,13 +482,16 @@ public void relevanceMatchQuery() { @Test public void relevanceMatch_Query() { - assertEquals(AstDSL.function("match_query", + assertEquals( + AstDSL.function( + "match_query", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("match_query('message', 'search query')") - ); + buildExprAst("match_query('message', 'search query')")); - assertEquals(AstDSL.function("match_query", + assertEquals( + AstDSL.function( + "match_query", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), @@ -631,238 +501,279 @@ public void relevanceMatch_Query() { @Test public void relevanceMatchQueryAltSyntax() { - assertEquals(AstDSL.function("match_query", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = match_query('search query')").toString() - ); + assertEquals( + AstDSL.function( + "match_query", + unresolvedArg("field", 
stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = match_query('search query')").toString()); - assertEquals(AstDSL.function("match_query", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = match_query(\"search query\")").toString() - ); + assertEquals( + AstDSL.function( + "match_query", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = match_query(\"search query\")").toString()); - assertEquals(AstDSL.function("matchquery", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = matchquery('search query')").toString() - ); + assertEquals( + AstDSL.function( + "matchquery", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = matchquery('search query')").toString()); - assertEquals(AstDSL.function("matchquery", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = matchquery(\"search query\")").toString() - ); + assertEquals( + AstDSL.function( + "matchquery", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = matchquery(\"search query\")").toString()); } @Test public void relevanceMatchPhraseAltSyntax() { - assertEquals(AstDSL.function("match_phrase", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = match_phrase('search query')").toString() - ); + assertEquals( + AstDSL.function( + "match_phrase", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = match_phrase('search query')").toString()); - assertEquals(AstDSL.function("match_phrase", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = match_phrase(\"search query\")").toString() - ); + assertEquals( + AstDSL.function( + "match_phrase", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = match_phrase(\"search query\")").toString()); - assertEquals(AstDSL.function("matchphrase", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = matchphrase('search query')").toString() - ); + assertEquals( + AstDSL.function( + "matchphrase", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = matchphrase('search query')").toString()); - assertEquals(AstDSL.function("matchphrase", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = matchphrase(\"search query\")").toString() - ); + assertEquals( + AstDSL.function( + "matchphrase", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), 
+ buildExprAst("message = matchphrase(\"search query\")").toString()); } @Test public void relevanceMultiMatchAltSyntax() { - assertEquals(AstDSL.function("multi_match", + assertEquals( + AstDSL.function( + "multi_match", unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("field1 = multi_match('search query')") - ); + buildExprAst("field1 = multi_match('search query')")); - assertEquals(AstDSL.function("multi_match", + assertEquals( + AstDSL.function( + "multi_match", unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("field1 = multi_match(\"search query\")") - ); + buildExprAst("field1 = multi_match(\"search query\")")); - assertEquals(AstDSL.function("multimatch", + assertEquals( + AstDSL.function( + "multimatch", unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("field1 = multimatch('search query')") - ); + buildExprAst("field1 = multimatch('search query')")); - assertEquals(AstDSL.function("multimatch", + assertEquals( + AstDSL.function( + "multimatch", unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("field1 = multimatch(\"search query\")") - ); + buildExprAst("field1 = multimatch(\"search query\")")); } @Test public void relevanceMulti_match() { - assertEquals(AstDSL.function("multi_match", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "multi_match", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("multi_match(['field1', 'field2' ^ 3.2], 'search query')") - ); + buildExprAst("multi_match(['field1', 'field2' ^ 3.2], 'search query')")); - assertEquals(AstDSL.function("multi_match", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "multi_match", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), unresolvedArg("operator", stringLiteral("AND"))), - buildExprAst("multi_match(['field1', 'field2' ^ 3.2], 'search query'," - + "analyzer='keyword', 'operator'='AND')")); + buildExprAst( + "multi_match(['field1', 'field2' ^ 3.2], 'search query'," + + "analyzer='keyword', 'operator'='AND')")); } @Test public void relevanceMultimatch_alternate_parameter_syntax() { - assertEquals(AstDSL.function("multimatch", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1F, "field2", 2F))), + assertEquals( + AstDSL.function( + "multimatch", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field1", 1F, "field2", 2F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("multimatch(query='search query', fields=['field1^1.0,field2^2.0'])") - ); + buildExprAst("multimatch(query='search query', fields=['field1^1.0,field2^2.0'])")); - assertEquals(AstDSL.function("multimatch", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1F, "field2", 2F))), + assertEquals( + 
AstDSL.function( + "multimatch", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field1", 1F, "field2", 2F))), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), unresolvedArg("operator", stringLiteral("AND"))), - buildExprAst("multimatch(query='search query', fields=['field1^1.0,field2^2.0']," - + "analyzer='keyword', operator='AND')")); + buildExprAst( + "multimatch(query='search query', fields=['field1^1.0,field2^2.0']," + + "analyzer='keyword', operator='AND')")); } @Test public void relevanceMultimatchquery_alternate_parameter_syntax() { - assertEquals(AstDSL.function("multimatchquery", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field", 1F))), + assertEquals( + AstDSL.function( + "multimatchquery", + unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field", 1F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("multimatchquery(query='search query', fields='field')") - ); + buildExprAst("multimatchquery(query='search query', fields='field')")); - assertEquals(AstDSL.function("multimatchquery", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field", 1F))), + assertEquals( + AstDSL.function( + "multimatchquery", + unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field", 1F))), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), unresolvedArg("operator", stringLiteral("AND"))), - buildExprAst("multimatchquery(query='search query', fields='field'," - + "analyzer='keyword', 'operator'='AND')")); + buildExprAst( + "multimatchquery(query='search query', fields='field'," + + "analyzer='keyword', 'operator'='AND')")); } @Test public void relevanceSimple_query_string() { - assertEquals(AstDSL.function("simple_query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "simple_query_string", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("simple_query_string(['field1', 'field2' ^ 3.2], 'search query')") - ); + buildExprAst("simple_query_string(['field1', 'field2' ^ 3.2], 'search query')")); - assertEquals(AstDSL.function("simple_query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "simple_query_string", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), unresolvedArg("operator", stringLiteral("AND"))), - buildExprAst("simple_query_string(['field1', 'field2' ^ 3.2], 'search query'," - + "analyzer='keyword', operator='AND')")); + buildExprAst( + "simple_query_string(['field1', 'field2' ^ 3.2], 'search query'," + + "analyzer='keyword', operator='AND')")); } @Test public void relevanceQuery_string() { - assertEquals(AstDSL.function("query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "query_string", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("query_string(['field1', 
'field2' ^ 3.2], 'search query')") - ); + buildExprAst("query_string(['field1', 'field2' ^ 3.2], 'search query')")); - assertEquals(AstDSL.function("query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "query_string", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), unresolvedArg("time_zone", stringLiteral("Canada/Pacific")), unresolvedArg("tie_breaker", stringLiteral("1.3"))), - buildExprAst("query_string(['field1', 'field2' ^ 3.2], 'search query'," - + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')")); + buildExprAst( + "query_string(['field1', 'field2' ^ 3.2], 'search query'," + + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')")); } @Test public void relevanceWildcard_query() { - assertEquals(AstDSL.function("wildcard_query", + assertEquals( + AstDSL.function( + "wildcard_query", unresolvedArg("field", qualifiedName("field")), unresolvedArg("query", stringLiteral("search query*")), unresolvedArg("boost", stringLiteral("1.5")), unresolvedArg("case_insensitive", stringLiteral("true")), unresolvedArg("rewrite", stringLiteral("scoring_boolean"))), - buildExprAst("wildcard_query(field, 'search query*', boost=1.5," - + "case_insensitive=true, rewrite='scoring_boolean'))") - ); + buildExprAst( + "wildcard_query(field, 'search query*', boost=1.5," + + "case_insensitive=true, rewrite='scoring_boolean'))")); } @Test public void relevanceScore_query() { assertEquals( AstDSL.score( - AstDSL.function("query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), - unresolvedArg("query", stringLiteral("search query")) - ), - AstDSL.doubleLiteral(1.0) - ), - buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'))") - ); + AstDSL.function( + "query_string", + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), + unresolvedArg("query", stringLiteral("search query"))), + AstDSL.doubleLiteral(1.0)), + buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'))")); } @Test public void relevanceScore_withBoost_query() { assertEquals( AstDSL.score( - AstDSL.function("query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), - unresolvedArg("query", stringLiteral("search query")) - ), - doubleLiteral(1.0) - ), - buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'), 1.0)") - ); + AstDSL.function( + "query_string", + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), + unresolvedArg("query", stringLiteral("search query"))), + doubleLiteral(1.0)), + buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'), 1.0)")); } @Test public void relevanceQuery() { - assertEquals(AstDSL.function("query", - unresolvedArg("query", stringLiteral("field1:query OR field2:query"))), - buildExprAst("query('field1:query OR field2:query')") - ); + assertEquals( + AstDSL.function( + "query", unresolvedArg("query", stringLiteral("field1:query OR field2:query"))), + buildExprAst("query('field1:query OR field2:query')")); - assertEquals(AstDSL.function("query", - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("analyzer", 
stringLiteral("keyword")), - unresolvedArg("time_zone", stringLiteral("Canada/Pacific")), - unresolvedArg("tie_breaker", stringLiteral("1.3"))), - buildExprAst("query('search query'," - + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')")); + assertEquals( + AstDSL.function( + "query", + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("analyzer", stringLiteral("keyword")), + unresolvedArg("time_zone", stringLiteral("Canada/Pacific")), + unresolvedArg("tie_breaker", stringLiteral("1.3"))), + buildExprAst( + "query('search query'," + + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')")); } @Test @@ -876,7 +787,8 @@ public void canBuildInClause() { buildExprAst("age not in (20, 30)")); assertEquals( - AstDSL.in(qualifiedName("age"), + AstDSL.in( + qualifiedName("age"), AstDSL.function("abs", AstDSL.intLiteral(20)), AstDSL.function("abs", AstDSL.intLiteral(30))), buildExprAst("age in (abs(20), abs(30))")); diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java index 1cb1ab5f8b..b2e4c54160 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -30,8 +29,7 @@ @ExtendWith(MockitoExtension.class) class AstHavingFilterBuilderTest { - @Mock - private QuerySpecification querySpec; + @Mock private QuerySpecification querySpec; private AstHavingFilterBuilder builder; diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java index 4ce2a2d3f7..639d73e419 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java @@ -25,32 +25,29 @@ class AstNowLikeFunctionTest extends AstBuilderTestBase { private static Stream allFunctions() { - return Stream.of("curdate", - "current_date", - "current_time", - "current_timestamp", - "curtime", - "localtimestamp", - "localtime", - "now", - "sysdate", - "utc_date", - "utc_time", - "utc_timestamp") + return Stream.of( + "curdate", + "current_date", + "current_time", + "current_timestamp", + "curtime", + "localtimestamp", + "localtime", + "now", + "sysdate", + "utc_date", + "utc_time", + "utc_timestamp") .map(Arguments::of); } private static Stream supportFsp() { - return Stream.of("sysdate") - .map(Arguments::of); + return Stream.of("sysdate").map(Arguments::of); } private static Stream supportShortcut() { - return Stream.of("current_date", - "current_time", - "current_timestamp", - "localtimestamp", - "localtime") + return Stream.of( + "current_date", "current_time", "current_timestamp", "localtimestamp", "localtime") .map(Arguments::of); } @@ -59,12 +56,7 @@ private static Stream supportShortcut() { void project_call(String name) { String call = name + "()"; assertEquals( - project( - values(emptyList()), - alias(call, function(name)) - ), - buildAST("SELECT " + call) - ); + project(values(emptyList()), alias(call, function(name))), buildAST("SELECT " + call)); } @ParameterizedTest @@ -73,29 +65,16 @@ void filter_call(String name) { String call = name + "()"; assertEquals( project( - filter( - 
relation("test"), - function( - "=", - qualifiedName("data"), - function(name)) - ), - AllFields.of() - ), - buildAST("SELECT * FROM test WHERE data = " + call) - ); + filter(relation("test"), function("=", qualifiedName("data"), function(name))), + AllFields.of()), + buildAST("SELECT * FROM test WHERE data = " + call)); } - @ParameterizedTest @MethodSource("supportFsp") void fsp(String name) { assertEquals( - project( - values(emptyList()), - alias(name + "(0)", function(name, intLiteral(0))) - ), - buildAST("SELECT " + name + "(0)") - ); + project(values(emptyList()), alias(name + "(0)", function(name, intLiteral(0)))), + buildAST("SELECT " + name + "(0)")); } } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java index 28665dd7ef..b0a7592990 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -66,9 +65,10 @@ public void functionNameCanBeUsedAsIdentifier() { void assertFunctionNameCouldBeId(String antlrFunctionName) { List functionList = - Arrays.stream(antlrFunctionName.split("\\|")).map(String::stripLeading) - .map(String::stripTrailing).collect( - Collectors.toList()); + Arrays.stream(antlrFunctionName.split("\\|")) + .map(String::stripLeading) + .map(String::stripTrailing) + .collect(Collectors.toList()); assertFalse(functionList.isEmpty()); for (String functionName : functionList) { @@ -109,5 +109,4 @@ private OpenSearchSQLParser createParser(String expr) { return parser; } } - } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java index 3c8d155e65..f72f1ba0ff 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -40,14 +39,11 @@ @ExtendWith(MockitoExtension.class) class AstSortBuilderTest { - @Mock - private QuerySpecification querySpec; + @Mock private QuerySpecification querySpec; - @Mock - private OrderByClauseContext orderByClause; + @Mock private OrderByClauseContext orderByClause; - @Mock - private UnresolvedPlan child; + @Mock private UnresolvedPlan child; @Test void can_build_sort_node() { @@ -56,32 +52,35 @@ void can_build_sort_node() { ImmutableMap> expects = ImmutableMap.>builder() - .put(new SortOption(null, null), - ImmutableList.of(argument("asc", booleanLiteral(true)))) - .put(new SortOption(ASC, null), - ImmutableList.of(argument("asc", booleanLiteral(true)))) - .put(new SortOption(DESC, null), + .put( + new SortOption(null, null), ImmutableList.of(argument("asc", booleanLiteral(true)))) + .put(new SortOption(ASC, null), ImmutableList.of(argument("asc", booleanLiteral(true)))) + .put( + new SortOption(DESC, null), ImmutableList.of(argument("asc", booleanLiteral(false)))) - .put(new SortOption(null, NULL_LAST), + .put( + new SortOption(null, NULL_LAST), ImmutableList.of( argument("asc", booleanLiteral(true)), argument("nullFirst", booleanLiteral(false)))) - .put(new SortOption(DESC, 
NULL_FIRST), + .put( + new SortOption(DESC, NULL_FIRST), ImmutableList.of( argument("asc", booleanLiteral(false)), argument("nullFirst", booleanLiteral(true)))) .build(); - expects.forEach((option, expect) -> { - when(querySpec.getOrderByOptions()).thenReturn(ImmutableList.of(option)); + expects.forEach( + (option, expect) -> { + when(querySpec.getOrderByOptions()).thenReturn(ImmutableList.of(option)); - AstSortBuilder sortBuilder = new AstSortBuilder(querySpec); - assertEquals( - new Sort( - child, // has to mock and attach child otherwise Guava ImmutableList NPE in getChild() - ImmutableList.of(field("name", expect))), - sortBuilder.visitOrderByClause(orderByClause).attach(child)); - }); + AstSortBuilder sortBuilder = new AstSortBuilder(querySpec); + assertEquals( + new Sort( + child, // has to mock and attach child otherwise Guava ImmutableList NPE in + // getChild() + ImmutableList.of(field("name", expect))), + sortBuilder.visitOrderByClause(orderByClause).attach(child)); + }); } - } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java index 2f75e89002..6dd027a74c 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser.context; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -35,32 +34,27 @@ class QuerySpecificationTest { @Test void can_collect_group_by_items_in_group_by_clause() { - QuerySpecification querySpec = collect( - "SELECT name, ABS(age) FROM test GROUP BY name, ABS(age)"); + QuerySpecification querySpec = + collect("SELECT name, ABS(age) FROM test GROUP BY name, ABS(age)"); assertEquals( - ImmutableList.of( - qualifiedName("name"), - function("ABS", qualifiedName("age"))), + ImmutableList.of(qualifiedName("name"), function("ABS", qualifiedName("age"))), querySpec.getGroupByItems()); } @Test void can_collect_select_items_in_select_clause() { - QuerySpecification querySpec = collect( - "SELECT name, ABS(age) FROM test"); + QuerySpecification querySpec = collect("SELECT name, ABS(age) FROM test"); assertEquals( - ImmutableList.of( - qualifiedName("name"), - function("ABS", qualifiedName("age"))), + ImmutableList.of(qualifiedName("name"), function("ABS", qualifiedName("age"))), querySpec.getSelectItems()); } @Test void can_collect_aggregators_in_select_clause() { - QuerySpecification querySpec = collect( - "SELECT name, AVG(age), SUM(balance) FROM test GROUP BY name"); + QuerySpecification querySpec = + collect("SELECT name, AVG(age), SUM(balance) FROM test GROUP BY name"); assertEquals( ImmutableSet.of( @@ -71,29 +65,25 @@ void can_collect_aggregators_in_select_clause() { @Test void can_collect_nested_aggregators_in_select_clause() { - QuerySpecification querySpec = collect( - "SELECT name, ABS(1 + AVG(age)) FROM test GROUP BY name"); + QuerySpecification querySpec = + collect("SELECT name, ABS(1 + AVG(age)) FROM test GROUP BY name"); assertEquals( - ImmutableSet.of( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))), + ImmutableSet.of(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))), querySpec.getAggregators()); } @Test void can_collect_alias_in_select_clause() { - QuerySpecification querySpec = collect( - "SELECT name AS n FROM test GROUP BY n"); + QuerySpecification querySpec = 
collect("SELECT name AS n FROM test GROUP BY n"); - assertEquals( - ImmutableMap.of("n", qualifiedName("name")), - querySpec.getSelectItemsByAlias()); + assertEquals(ImmutableMap.of("n", qualifiedName("name")), querySpec.getSelectItemsByAlias()); } @Test void should_deduplicate_same_aggregators() { - QuerySpecification querySpec = collect( - "SELECT AVG(age), AVG(balance), AVG(age) FROM test GROUP BY name"); + QuerySpecification querySpec = + collect("SELECT AVG(age), AVG(balance), AVG(age) FROM test GROUP BY name"); assertEquals( ImmutableSet.of( @@ -119,20 +109,24 @@ void can_collect_sort_options_in_order_by_clause() { @Test void should_skip_sort_items_in_window_function() { - assertEquals(1, - collect("SELECT name, RANK() OVER(ORDER BY age) " - + "FROM test ORDER BY name" - ).getOrderByOptions().size()); + assertEquals( + 1, + collect("SELECT name, RANK() OVER(ORDER BY age) FROM test ORDER BY name") + .getOrderByOptions() + .size()); } @Test void can_collect_filtered_aggregation() { assertEquals( - ImmutableSet.of(alias("AVG(age) FILTER(WHERE age > 20)", - filteredAggregate("AVG", qualifiedName("age"), - function(">", qualifiedName("age"), intLiteral(20))))), - collect("SELECT AVG(age) FILTER(WHERE age > 20) FROM test").getAggregators() - ); + ImmutableSet.of( + alias( + "AVG(age) FILTER(WHERE age > 20)", + filteredAggregate( + "AVG", + qualifiedName("age"), + function(">", qualifiedName("age"), intLiteral(20))))), + collect("SELECT AVG(age) FILTER(WHERE age > 20) FROM test").getAggregators()); } private QuerySpecification collect(String query) { @@ -147,5 +141,4 @@ private QuerySpecificationContext parse(String query) { parser.addErrorListener(new SyntaxAnalysisErrorListener()); return parser.querySpecification(); } - }