diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index f4c7fd87495..27ef2349d35 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -28,6 +28,7 @@ rocksdbjni = "7.7.3"
 iceberg = '1.3.1'
 trino = '426'
 spark = "3.4.1"
+scala-collection-compat = "2.7.0"
 
 protobuf-plugin = "0.9.2"
 
@@ -91,6 +92,7 @@ trino-toolkit= { group = "io.trino", name = "trino-plugin-toolkit", version.ref
 trino-testing= { group = "io.trino", name = "trino-testing", version.ref = "trino" }
 iceberg-spark-runtime = { group = "org.apache.iceberg", name = "iceberg-spark-runtime-3.4_2.13", version.ref = "iceberg" }
 spark-sql = { group = "org.apache.spark", name = "spark-sql_2.13", version.ref = "spark" }
+scala-collection-compat = { group = "org.scala-lang.modules", name = "scala-collection-compat_2.13", version.ref = "scala-collection-compat" }
 
 [bundles]
diff --git a/integration-test/build.gradle.kts b/integration-test/build.gradle.kts
index 0d5ef1fcef1..40e5d473c55 100644
--- a/integration-test/build.gradle.kts
+++ b/integration-test/build.gradle.kts
@@ -105,6 +105,7 @@ dependencies {
     exclude("org.apache.zookeeper")
     exclude("io.dropwizard.metrics")
   }
+  testImplementation(libs.scala.collection.compat)
   testImplementation(libs.slf4j.jdk14)
 }
diff --git a/integration-test/src/main/java/com/datastrato/graviton/integration/test/MiniGraviton.java b/integration-test/src/main/java/com/datastrato/graviton/integration/test/MiniGraviton.java
index 19da8da9fe4..126ae0188e5 100644
--- a/integration-test/src/main/java/com/datastrato/graviton/integration/test/MiniGraviton.java
+++ b/integration-test/src/main/java/com/datastrato/graviton/integration/test/MiniGraviton.java
@@ -23,8 +23,6 @@
 import com.google.common.collect.ImmutableMap;
 import java.io.File;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.Collections;
@@ -41,6 +39,7 @@ public class MiniGraviton {
   private static final Logger LOG = LoggerFactory.getLogger(MiniGraviton.class);
 
+  private MiniGravitonContext context;
   private RESTClient restClient;
   private final File mockConfDir;
   private final ServerConfig serverConfig = new ServerConfig();
@@ -50,7 +49,8 @@ public class MiniGraviton {
   private int port;
 
-  public MiniGraviton() throws IOException {
+  public MiniGraviton(MiniGravitonContext context) throws IOException {
+    this.context = context;
     this.mockConfDir = Files.createTempDirectory("MiniGraviton").toFile();
     mockConfDir.mkdirs();
   }
@@ -154,52 +154,43 @@ public Config getServerConfig() {
     return serverConfig;
   }
 
-  // Customize the config file
-  private void customizeConfigFile(String configTempFileName, String configFileName)
-      throws IOException {
-    Map<String, String> configMap = new HashMap<>();
-    configMap.put(
-        GravitonServer.WEBSERVER_CONF_PREFIX + JettyServerConfig.WEBSERVER_HTTP_PORT.getKey(),
-        String.valueOf(RESTUtils.findAvailablePort(2000, 3000)));
-    configMap.put(
-        Configs.ENTRY_KV_ROCKSDB_BACKEND_PATH.getKey(), "/tmp/graviton-" + UUID.randomUUID());
-
-    configMap.put(
-        AuxiliaryServiceManager.GRAVITON_AUX_SERVICE_PREFIX
-            + AuxiliaryServiceManager.AUX_SERVICE_NAMES,
-        IcebergRESTService.SERVICE_NAME);
+  Map<String, String> getIcebergRestServiceConfigs() throws IOException {
+    Map<String, String> customConfigs = new HashMap<>();
 
     String icebergJarPath =
         Paths.get("catalogs", "catalog-lakehouse-iceberg", "build", "libs").toString();
     String icebergConfigPath =
         Paths.get("catalogs", "catalog-lakehouse-iceberg", "src", "main", "resources").toString();
-    configMap.put(
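+    // Put the locally built catalog-lakehouse-iceberg jar and its resource
+    // directory on the Iceberg REST auxiliary service classpath.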
+    customConfigs.put(
         AuxiliaryServiceManager.GRAVITON_AUX_SERVICE_PREFIX
             + IcebergRESTService.SERVICE_NAME
             + "."
             + AuxiliaryServiceManager.AUX_SERVICE_CLASSPATH,
         String.join(",", icebergJarPath, icebergConfigPath));
 
-    configMap.put(
+
+    customConfigs.put(
         AuxiliaryServiceManager.GRAVITON_AUX_SERVICE_PREFIX
             + IcebergRESTService.SERVICE_NAME
             + "."
             + JettyServerConfig.WEBSERVER_HTTP_PORT.getKey(),
         String.valueOf(RESTUtils.findAvailablePort(3000, 4000)));
+    return customConfigs;
+  }
 
-    Properties props = new Properties();
-
-    try (InputStream inputStream = Files.newInputStream(Paths.get(configTempFileName));
-        OutputStream outputStream = Files.newOutputStream(Paths.get(configFileName))) {
-      props.load(inputStream);
+  // Customize the config file
+  private void customizeConfigFile(String configTempFileName, String configFileName)
+      throws IOException {
+    Map<String, String> configMap = new HashMap<>();
+    configMap.put(
+        GravitonServer.WEBSERVER_CONF_PREFIX + JettyServerConfig.WEBSERVER_HTTP_PORT.getKey(),
+        String.valueOf(RESTUtils.findAvailablePort(2000, 3000)));
+    configMap.put(
+        Configs.ENTRY_KV_ROCKSDB_BACKEND_PATH.getKey(), "/tmp/graviton-" + UUID.randomUUID());
 
-    for (Map.Entry<String, String> entry : configMap.entrySet()) {
-      props.setProperty(entry.getKey(), entry.getValue());
-    }
+    configMap.putAll(getIcebergRestServiceConfigs());
+    configMap.putAll(context.customConfig);
 
-      props.store(outputStream, null);
-    } catch (IOException e) {
-      LOG.error("Exception in customizeConfigFile ", e);
-    }
+    ITUtils.rewriteConfigFile(configTempFileName, configFileName, configMap);
   }
 
   private boolean checkIfServerIsRunning() {
diff --git a/integration-test/src/main/java/com/datastrato/graviton/integration/test/MiniGravitonContext.java b/integration-test/src/main/java/com/datastrato/graviton/integration/test/MiniGravitonContext.java
new file mode 100644
index 00000000000..40459be7260
--- /dev/null
+++ b/integration-test/src/main/java/com/datastrato/graviton/integration/test/MiniGravitonContext.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright 2023 Datastrato.
+ * This software is licensed under the Apache License version 2.
+ */
+
+package com.datastrato.graviton.integration.test;
+
+import java.util.Map;
+
+public class MiniGravitonContext {
+  Map<String, String> customConfig;
+
+  public MiniGravitonContext(Map<String, String> customConfig) {
+    this.customConfig = customConfig;
+  }
+}
diff --git a/integration-test/src/main/java/com/datastrato/graviton/integration/test/util/ITUtils.java b/integration-test/src/main/java/com/datastrato/graviton/integration/test/util/ITUtils.java
index edadc9ec78b..c7ea5ad9cda 100644
--- a/integration-test/src/main/java/com/datastrato/graviton/integration/test/util/ITUtils.java
+++ b/integration-test/src/main/java/com/datastrato/graviton/integration/test/util/ITUtils.java
@@ -5,9 +5,31 @@
 package com.datastrato.graviton.integration.test.util;
 
 import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.Map;
+import java.util.Properties;
 
 public class ITUtils {
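+  // Values for the "testMode" system property; "embedded" runs MiniGraviton in-process.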
+  public static final String TEST_MODE = "testMode";
+  public static final String EMBEDDED_TEST_MODE = "embedded";
+
   public static String joinDirPath(String... dirs) {
     return String.join(File.separator, dirs);
   }
+
+  public static void rewriteConfigFile(
+      String configTempFileName, String configFileName, Map<String, String> configMap)
+      throws IOException {
+    Properties props = new Properties();
+    try (InputStream inputStream = Files.newInputStream(Paths.get(configTempFileName));
+        OutputStream outputStream = Files.newOutputStream(Paths.get(configFileName))) {
+      props.load(inputStream);
+      props.putAll(configMap);
+      props.store(outputStream, null);
+    }
+  }
 }
diff --git a/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTHiveCatalogIT.java b/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTHiveCatalogIT.java
new file mode 100644
index 00000000000..e7b820bb787
--- /dev/null
+++ b/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTHiveCatalogIT.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2023 Datastrato.
+ * This software is licensed under the Apache License version 2.
+ */
+
+package com.datastrato.graviton.integration.test.catalog.lakehouse.iceberg;
+
+import com.datastrato.graviton.catalog.lakehouse.iceberg.IcebergCatalogBackend;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.TestInstance.Lifecycle;
+
+// The Hive & JDBC catalogs must be tested in the graviton-docker-it env, so we
+// create a separate class per backend instead of using a JUnit parameterized
+// test to auto-generate the catalog type.
+@Tag("graviton-docker-it")
+@TestInstance(Lifecycle.PER_CLASS)
+public class IcebergRESTHiveCatalogIT extends IcebergRESTServiceIT {
+  public IcebergRESTHiveCatalogIT() {
+    catalogType = IcebergCatalogBackend.HIVE;
+  }
+}
diff --git a/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTServiceBaseIT.java b/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTServiceBaseIT.java
index a6503f6afd2..bebd92ea3f1 100644
--- a/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTServiceBaseIT.java
+++ b/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTServiceBaseIT.java
@@ -7,6 +7,7 @@
 
 import com.datastrato.graviton.Config;
 import com.datastrato.graviton.aux.AuxiliaryServiceManager;
+import com.datastrato.graviton.catalog.lakehouse.iceberg.IcebergCatalogBackend;
 import com.datastrato.graviton.catalog.lakehouse.iceberg.IcebergConfig;
 import com.datastrato.graviton.catalog.lakehouse.iceberg.IcebergRESTService;
 import com.datastrato.graviton.integration.test.util.AbstractIT;
@@ -15,6 +16,7 @@
 import com.google.common.collect.ImmutableList;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -23,16 +25,119 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.TestInstance.Lifecycle;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /*
  * Referred from spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/SparkTestBase.java
  */
+
+@TestInstance(Lifecycle.PER_CLASS)
 public class IcebergRESTServiceBaseIT extends AbstractIT {
+  public static final Logger LOG = LoggerFactory.getLogger(IcebergRESTServiceBaseIT.class);
 
   private SparkSession sparkSession;
+  protected IcebergCatalogBackend catalogType = IcebergCatalogBackend.MEMORY;
 
-  public IcebergRESTServiceBaseIT() {
+  @BeforeAll
+  void initIcebergTestEnv() throws Exception {
+    registerIcebergCatalogConfig();
+    AbstractIT.startIntegrationTest();
     initSparkEnv();
+    LOG.info("Graviton and Spark env started, catalog type: {}", catalogType);
   }
 
+  @AfterAll
+  void stopIcebergTestEnv() throws Exception {
+    stopSparkEnv();
+    AbstractIT.stopIntegrationTest();
+    LOG.info("Graviton and Spark env stopped, catalog type: {}", catalogType);
+  }
+
+  // AbstractIT#startIntegrationTest() is static, so we can't inject catalog info
+  // when it is invoked automatically by JUnit. We therefore override
+  // startIntegrationTest() here to disable the automatic invocation.
+  @BeforeAll
+  public static void startIntegrationTest() {}
+
+  @AfterAll
+  public static void stopIntegrationTest() {}
+
+  boolean catalogTypeNotMemory() {
+    return !catalogType.equals(IcebergCatalogBackend.MEMORY);
+  }
+
+  private void registerIcebergCatalogConfig() {
+    Map<String, String> icebergConfigs;
+
+    switch (catalogType) {
+      case HIVE:
+        icebergConfigs = getIcebergHiveCatalogConfigs();
+        break;
+      case JDBC:
+        icebergConfigs = getIcebergJdbcCatalogConfigs();
+        break;
+      case MEMORY:
+        icebergConfigs = getIcebergMemoryCatalogConfigs();
+        break;
+      default:
+        throw new RuntimeException("Unsupported Iceberg catalog type: " + catalogType);
+    }
+
+    AbstractIT.registerCustomConfigs(icebergConfigs);
+    LOG.info("Iceberg REST service config registered, " + StringUtils.join(icebergConfigs));
+  }
+
+  private static Map<String, String> getIcebergMemoryCatalogConfigs() {
+    Map<String, String> configMap = new HashMap<>();
+    configMap.put(
+        AuxiliaryServiceManager.GRAVITON_AUX_SERVICE_PREFIX
+            + IcebergRESTService.SERVICE_NAME
+            + "."
+            + IcebergConfig.CATALOG_BACKEND.getKey(),
+        IcebergCatalogBackend.MEMORY.toString().toLowerCase());
+
+    configMap.put(
+        AuxiliaryServiceManager.GRAVITON_AUX_SERVICE_PREFIX
+            + IcebergRESTService.SERVICE_NAME
+            + "."
+            + IcebergConfig.CATALOG_WAREHOUSE.getKey(),
+        "/tmp/");
+    return configMap;
+  }
+
+  private static Map<String, String> getIcebergJdbcCatalogConfigs() {
+    Map<String, String> configMap = new HashMap<>();
+    return configMap;
+  }
+
+  private static Map<String, String> getIcebergHiveCatalogConfigs() {
+    Map<String, String> customConfigs = new HashMap<>();
+    customConfigs.put(
+        AuxiliaryServiceManager.GRAVITON_AUX_SERVICE_PREFIX
+            + IcebergRESTService.SERVICE_NAME
+            + "."
+            + IcebergConfig.CATALOG_BACKEND.getKey(),
+        IcebergCatalogBackend.HIVE.toString().toLowerCase());
+
+    customConfigs.put(
+        AuxiliaryServiceManager.GRAVITON_AUX_SERVICE_PREFIX
+            + IcebergRESTService.SERVICE_NAME
+            + "."
+            + IcebergConfig.CATALOG_URI.getKey(),
+        "thrift://127.0.0.1:9083");
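+    // NOTE: thrift://127.0.0.1:9083 is assumed to be the Hive metastore exposed by
+    // the graviton-docker-it environment.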
+
+    customConfigs.put(
+        AuxiliaryServiceManager.GRAVITON_AUX_SERVICE_PREFIX
+            + IcebergRESTService.SERVICE_NAME
+            + "."
+            + IcebergConfig.CATALOG_WAREHOUSE.getKey(),
+        "file:///tmp/user/hive/warehouse-hive/");
+    return customConfigs;
+  }
 
   private static IcebergConfig buildIcebergConfig(Config config) {
@@ -59,8 +164,13 @@ private void initSparkEnv() {
         .config("spark.sql.catalog.rest.type", "rest")
         .config("spark.sql.catalog.rest.uri", IcebergRESTUri)
         .getOrCreate();
+  }
 
-    sparkSession.sparkContext().setLogLevel("WARN");
+  private void stopSparkEnv() {
+    if (sparkSession != null) {
+      sparkSession.close();
+      sparkSession = null;
+    }
   }
 
   protected List<Object[]> sql(String query, Object... args) {
@@ -139,6 +249,10 @@ protected Set<String> convertToStringSet(List<Object[]> objects, int index) {
     return objects.stream().map(row -> String.valueOf(row[index])).collect(Collectors.toSet());
   }
 
+  protected List<String> convertToStringList(List<Object[]> objects, int index) {
+    return objects.stream().map(row -> String.valueOf(row[index])).collect(Collectors.toList());
+  }
+
   protected Map<String, String> convertToStringMap(List<Object[]> objects) {
     return objects.stream()
         .collect(
diff --git a/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTServiceIT.java b/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTServiceIT.java
index 82aa291d987..f46cedbf9b4 100644
--- a/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTServiceIT.java
+++ b/integration-test/src/test/java/com/datastrato/graviton/integration/test/catalog/lakehouse/iceberg/IcebergRESTServiceIT.java
@@ -5,6 +5,7 @@
 
 package com.datastrato.graviton.integration.test.catalog.lakehouse.iceberg;
 
+import com.datastrato.graviton.catalog.lakehouse.iceberg.IcebergCatalogBackend;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import java.util.Arrays;
@@ -12,6 +13,7 @@
 import java.util.Map;
 import java.util.Set;
 import org.apache.iceberg.exceptions.BadRequestException;
+import org.apache.iceberg.exceptions.ServiceFailureException;
 import org.apache.iceberg.exceptions.ValidationException;
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException;
@@ -19,87 +21,149 @@
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException;
 import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException;
 import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.TestInstance.Lifecycle;
+import org.junit.jupiter.api.condition.EnabledIf;
 
+@TestInstance(Lifecycle.PER_CLASS)
 public class IcebergRESTServiceIT extends IcebergRESTServiceBaseIT {
 
-  @BeforeEach
-  void initEnv() {
+  private static final String ICEBERG_REST_NS_PREFIX = "iceberg_rest_";
+
+  @BeforeAll
+  void prepareSQLContext() {
     // use rest catalog
     sql("USE rest");
-    sql("CREATE DATABASE IF NOT EXISTS table_test");
+    purgeAllIcebergTestNamespaces();
+    sql("CREATE DATABASE IF NOT EXISTS iceberg_rest_table_test");
+  }
+
+  private void purgeTable(String namespace, String table) {
+    sql(String.format("DROP TABLE %s.%s PURGE", namespace, table));
+  }
+
+  private void purgeNameSpace(String namespace) {
+    Set<String> tables = convertToStringSet(sql("SHOW TABLES IN " + namespace), 1);
+    tables.forEach(table -> purgeTable(namespace, table));
+    sql("DROP database " + namespace);
+  }
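+
+  // Drop every leftover namespace carrying the test prefix (and its tables) so
+  // the suite starts against a clean catalog.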
+  private void purgeAllIcebergTestNamespaces() {
+    List<Object[]> databases =
+        sql(String.format("SHOW DATABASES like '%s*'", ICEBERG_REST_NS_PREFIX));
+    Set<String> databasesString = convertToStringSet(databases, 0);
+    databasesString.stream()
+        .filter(ns -> ns.startsWith(ICEBERG_REST_NS_PREFIX))
+        .forEach(ns -> purgeNameSpace(ns));
   }
 
   @Test
   void testCreateNamespace() {
+    String namespaceName = ICEBERG_REST_NS_PREFIX + "create";
     sql(
-        "CREATE DATABASE db_create COMMENT 'This is customer database' WITH DBPROPERTIES (ID=001, Name='John')");
+        String.format(
+            "CREATE DATABASE %s COMMENT 'This is customer database' "
+                + "WITH DBPROPERTIES (ID=001, Name='John')",
+            namespaceName));
     Map<String, String> databaseInfo =
-        convertToStringMap(sql("DESCRIBE DATABASE EXTENDED db_create"));
+        convertToStringMap(sql("DESCRIBE DATABASE EXTENDED " + namespaceName));
     Assertions.assertEquals("This is customer database", databaseInfo.get("Comment"));
-    Assertions.assertEquals("db_create", databaseInfo.get("Namespace Name"));
-    Assertions.assertEquals("((ID,001), (Name,John))", databaseInfo.get("Properties"));
+    Assertions.assertEquals(namespaceName, databaseInfo.get("Namespace Name"));
+    String properties = databaseInfo.getOrDefault("Properties", "");
+    switch (catalogType) {
+      case HIVE:
+        // Hive adds more properties, like:
+        // ((hive.metastore.database.owner,hive), (hive.metastore.database.owner-type,USER))
+        Assertions.assertTrue(properties.contains("(ID,001), (Name,John)"));
+        break;
+      default:
+        Assertions.assertEquals("((ID,001), (Name,John))", properties);
+        break;
+    }
 
     Assertions.assertThrowsExactly(
-        NamespaceAlreadyExistsException.class, () -> sql("CREATE DATABASE db_create"));
+        NamespaceAlreadyExistsException.class, () -> sql("CREATE DATABASE " + namespaceName));
   }
 
   @Test
   void testListNamespace() {
-    sql("CREATE DATABASE list_foo1");
-    sql("CREATE DATABASE list_foo2");
-    List<Object[]> databases = sql("SHOW DATABASES like 'list_foo*'");
+    sql(String.format("CREATE DATABASE %slist_foo1", ICEBERG_REST_NS_PREFIX));
+    sql(String.format("CREATE DATABASE %slist_foo2", ICEBERG_REST_NS_PREFIX));
+    List<Object[]> databases =
+        sql(String.format("SHOW DATABASES like '%slist_foo*'", ICEBERG_REST_NS_PREFIX));
     Set<String> databasesString = convertToStringSet(databases, 0);
-    Assertions.assertEquals(ImmutableSet.of("list_foo1", "list_foo2"), databasesString);
+    Assertions.assertEquals(
+        ImmutableSet.of(ICEBERG_REST_NS_PREFIX + "list_foo1", ICEBERG_REST_NS_PREFIX + "list_foo2"),
+        databasesString);
   }
 
   @Test
   void testDropNameSpace() {
-    sql("CREATE DATABASE IF NOT EXISTS drop_foo1");
-    sql("DESC DATABASE drop_foo1");
+    String namespaceName = ICEBERG_REST_NS_PREFIX + "foo1";
+    sql("CREATE DATABASE IF NOT EXISTS " + namespaceName);
+    sql("DESC DATABASE " + namespaceName);
     sql(
-        "CREATE TABLE IF NOT EXISTS drop_foo1.test (id bigint COMMENT 'unique id',data string) using iceberg");
+        String.format(
+            "CREATE TABLE IF NOT EXISTS %s.test "
+                + "(id bigint COMMENT 'unique id',data string) using iceberg",
+            namespaceName));
 
     // seems a bug in Iceberg REST client, should be NamespaceNotEmptyException
-    Assertions.assertThrowsExactly(BadRequestException.class, () -> sql("DROP DATABASE drop_foo1"));
-    sql("DROP TABLE drop_foo1.test");
-    sql("DROP DATABASE drop_foo1");
+    Assertions.assertThrowsExactly(
+        BadRequestException.class, () -> sql("DROP DATABASE " + namespaceName));
+    sql(String.format("DROP TABLE %s.test", namespaceName));
+    sql("DROP DATABASE " + namespaceName);
     Assertions.assertThrowsExactly(
-        NoSuchNamespaceException.class, () -> sql("DESC DATABASE drop_foo1"));
+        NoSuchNamespaceException.class, () -> sql("DESC DATABASE " + namespaceName));
     Assertions.assertThrowsExactly(
-        NoSuchNamespaceException.class, () -> sql("DROP DATABASE drop_foo1"));
+        NoSuchNamespaceException.class, () -> sql("DROP DATABASE " + namespaceName));
   }
 
   @Test
   void testNameSpaceProperties() {
-    sql("DROP DATABASE if exists alter_foo1");
-    sql("CREATE DATABASE if not exists alter_foo1");
-    sql("ALTER DATABASE alter_foo1 SET PROPERTIES(id = 2)");
-    List<Object[]> datas = sql("DESC DATABASE EXTENDED alter_foo1");
+    String namespaceName = ICEBERG_REST_NS_PREFIX + "alter_foo1";
+    sql("DROP DATABASE if exists " + namespaceName);
+    sql("CREATE DATABASE if not exists " + namespaceName);
+    sql(String.format("ALTER DATABASE %s SET PROPERTIES(id = 2)", namespaceName));
+    List<Object[]> datas = sql("DESC DATABASE EXTENDED " + namespaceName);
     Map<String, String> m = convertToStringMap(datas);
-    Assertions.assertEquals("((id,2))", m.getOrDefault("Properties", ""));
+    String properties = m.getOrDefault("Properties", "");
+    switch (catalogType) {
+      case MEMORY:
+        Assertions.assertEquals("((id,2))", properties);
+        break;
+      default:
+        // ((hive.metastore.database.owner,hive), (hive.metastore.database.owner-type,USER), (id,2))
+        Assertions.assertTrue(properties.contains("(id,2)"));
+    }
   }
 
   @Test
   void testDML() {
-    sql("CREATE DATABASE IF NOT EXISTS dml");
-    sql("DROP TABLE IF EXISTS dml.test");
-    sql("CREATE TABLE dml.test (id bigint COMMENT 'unique id',data string) using iceberg");
-    sql(" INSERT INTO dml.test VALUES (1, 'a'), (2, 'b');");
-    sql(" INSERT INTO dml.test VALUES (3, 'c'), (4, 'd');");
-    Map<String, String> m = convertToStringMap(sql("SELECT * FROM dml.test"));
+    String namespaceName = ICEBERG_REST_NS_PREFIX + "dml";
+    String tableName = namespaceName + ".test";
+    sql("CREATE DATABASE IF NOT EXISTS " + namespaceName);
+    sql(
+        String.format(
+            "CREATE TABLE %s (id bigint COMMENT 'unique id',data string) using iceberg",
+            tableName));
+    sql(String.format(" INSERT INTO %s VALUES (1, 'a'), (2, 'b');", tableName));
+    sql(String.format(" INSERT INTO %s VALUES (3, 'c'), (4, 'd');", tableName));
+    Map<String, String> m = convertToStringMap(sql("SELECT * FROM " + tableName));
     Assertions.assertEquals(m, ImmutableMap.of("1", "a", "2", "b", "3", "c", "4", "d"));
   }
 
   @Test
   void testCreateTable() {
     sql(
-        "CREATE TABLE table_test.create_foo1( id bigint, data string, ts timestamp)"
+        "CREATE TABLE iceberg_rest_table_test.create_foo1"
+            + "( id bigint, data string, ts timestamp)"
             + "USING iceberg PARTITIONED BY (bucket(16, id), days(ts))");
-    Map<String, String> tableInfo = getTableInfo("table_test.create_foo1");
+    Map<String, String> tableInfo = getTableInfo("iceberg_rest_table_test.create_foo1");
     Map<String, String> m =
         ImmutableMap.of(
             "id", "bigint",
@@ -111,64 +175,91 @@ void testCreateTable() {
     checkMapContains(m, tableInfo);
 
     Assertions.assertThrowsExactly(
-        TableAlreadyExistsException.class, () -> sql("CREATE TABLE table_test.create_foo1"));
+        TableAlreadyExistsException.class,
+        () -> sql("CREATE TABLE iceberg_rest_table_test.create_foo1"));
   }
 
   @Test
   void testDropTable() {
     sql(
-        "CREATE TABLE table_test.drop_foo1(id bigint COMMENT 'unique id',data string) using iceberg");
-    sql("DROP TABLE table_test.drop_foo1");
+        "CREATE TABLE iceberg_rest_table_test.drop_foo1"
+            + "(id bigint COMMENT 'unique id',data string) using iceberg");
+    sql("DROP TABLE iceberg_rest_table_test.drop_foo1");
 
     Assertions.assertThrowsExactly(
-        AnalysisException.class, () -> sql("DESC TABLE table_test.drop_foo1"));
+        AnalysisException.class, () -> sql("DESC TABLE iceberg_rest_table_test.drop_foo1"));
     Assertions.assertThrowsExactly(
table_test.drop_foo1")); + NoSuchTableException.class, () -> sql("DROP TABLE iceberg_rest_table_test.drop_foo1")); } @Test void testListTable() { - sql("CREATE DATABASE if not exists list_db"); - sql("CREATE TABLE list_db.list_foo1(id bigint COMMENT 'unique id',data string) using iceberg"); - sql("CREATE TABLE list_db.list_foo2(id bigint COMMENT 'unique id',data string) using iceberg"); + String namespaceName = ICEBERG_REST_NS_PREFIX + "list_db"; + sql("CREATE DATABASE if not exists " + namespaceName); + sql( + String.format( + "CREATE TABLE %s.list_foo1(id bigint COMMENT 'unique id',data string) using iceberg", + namespaceName)); + sql( + String.format( + "CREATE TABLE %s.list_foo2(id bigint COMMENT 'unique id',data string) using iceberg", + namespaceName)); - Set tables = convertToStringSet(sql("show tables in list_db"), 1); + Set tables = convertToStringSet(sql("show tables in " + namespaceName), 1); Assertions.assertEquals(ImmutableSet.of("list_foo1", "list_foo2"), tables); } @Test void testRenameTable() { sql( - "CREATE TABLE table_test.rename_foo1(id bigint COMMENT 'unique id',data string) using iceberg"); - sql("ALTER TABLE table_test.rename_foo1 RENAME TO table_test.rename_foo2"); - sql("desc table table_test.rename_foo2"); + "CREATE TABLE iceberg_rest_table_test.rename_foo1" + + "(id bigint COMMENT 'unique id',data string) using iceberg"); + sql( + "ALTER TABLE iceberg_rest_table_test.rename_foo1 " + + "RENAME TO iceberg_rest_table_test.rename_foo2"); + sql("desc table iceberg_rest_table_test.rename_foo2"); Assertions.assertThrowsExactly( - AnalysisException.class, () -> sql("desc table table_test.rename_foo1")); + AnalysisException.class, () -> sql("desc table iceberg_rest_table_test.rename_foo1")); sql( - "CREATE TABLE table_test.rename_foo1(id bigint COMMENT 'unique id',data string) using iceberg"); + "CREATE TABLE iceberg_rest_table_test.rename_foo1" + + "(id bigint COMMENT 'unique id',data string) using iceberg"); + + Class exception = + catalogType == IcebergCatalogBackend.HIVE + ? 
+    Class<? extends Exception> exception =
+        catalogType == IcebergCatalogBackend.HIVE
+            ? ServiceFailureException.class
+            : TableAlreadyExistsException.class;
+
     Assertions.assertThrowsExactly(
-        TableAlreadyExistsException.class,
-        () -> sql("ALTER TABLE table_test.rename_foo2 RENAME TO table_test.rename_foo1"));
+        exception,
+        () ->
+            sql(
+                "ALTER TABLE iceberg_rest_table_test.rename_foo2 "
+                    + "RENAME TO iceberg_rest_table_test.rename_foo1"));
   }
 
   @Test
   void testSetTableProperties() {
     sql(
-        "CREATE TABLE table_test.set_foo1 (id bigint COMMENT 'unique id',data string) using iceberg");
-    sql("ALTER TABLE table_test.set_foo1 SET TBLPROPERTIES ('read.split.target-size'='268435456')");
-    Map<String, String> m = getTableInfo("table_test.set_foo1");
+        "CREATE TABLE iceberg_rest_table_test.set_foo1"
+            + " (id bigint COMMENT 'unique id',data string) using iceberg");
+    sql(
+        "ALTER TABLE iceberg_rest_table_test.set_foo1 SET TBLPROPERTIES "
+            + "('read.split.target-size'='268435456')");
+    Map<String, String> m = getTableInfo("iceberg_rest_table_test.set_foo1");
     Assertions.assertTrue(
         m.getOrDefault("Table Properties", "").contains("read.split.target-size=268435456"));
 
-    sql("ALTER TABLE table_test.set_foo1 UNSET TBLPROPERTIES ('read.split.target-size')");
-    m = getTableInfo("table_test.set_foo1");
+    sql(
+        "ALTER TABLE iceberg_rest_table_test.set_foo1 "
+            + "UNSET TBLPROPERTIES ('read.split.target-size')");
+    m = getTableInfo("iceberg_rest_table_test.set_foo1");
     Assertions.assertFalse(
         m.getOrDefault("Table Properties", "read.split.target-size")
             .contains("read.split.target-size"));
 
-    sql("ALTER TABLE table_test.set_foo1 SET TBLPROPERTIES ('comment'='a')");
-    m = getTableInfo("table_test.set_foo1");
+    sql("ALTER TABLE iceberg_rest_table_test.set_foo1 SET TBLPROPERTIES ('comment'='a')");
+    m = getTableInfo("iceberg_rest_table_test.set_foo1");
     // comment is hidden
     Assertions.assertFalse(m.getOrDefault("Table Properties", "").contains("comment=a"));
   }
 
   @Test
   void testAddColumns() {
     sql(
-        "CREATE TABLE table_test.add_foo1 (id bigint COMMENT 'unique id',data string) using iceberg");
+        "CREATE TABLE iceberg_rest_table_test.add_foo1"
+            + " (id string COMMENT 'unique id',data string) using iceberg");
 
     Assertions.assertThrowsExactly(
         AnalysisException.class,
-        () -> sql("ALTER TABLE table_test.add_foo1 ADD COLUMNS foo_after String After not_exits"));
+        () ->
+            sql(
+                "ALTER TABLE iceberg_rest_table_test.add_foo1 "
+                    + "ADD COLUMNS foo_after String After not_exits"));
 
-    sql("ALTER TABLE table_test.add_foo1 ADD COLUMNS foo_after String After id");
-    List<String> columns = getTableColumns("table_test.add_foo1");
+    sql("ALTER TABLE iceberg_rest_table_test.add_foo1 ADD COLUMNS foo_after String After id");
+    List<String> columns = getTableColumns("iceberg_rest_table_test.add_foo1");
     Assertions.assertEquals(Arrays.asList("id", "foo_after", "data"), columns);
 
-    sql("ALTER TABLE table_test.add_foo1 ADD COLUMNS foo_last String");
-    columns = getTableColumns("table_test.add_foo1");
+    sql("ALTER TABLE iceberg_rest_table_test.add_foo1 ADD COLUMNS foo_last String");
+    columns = getTableColumns("iceberg_rest_table_test.add_foo1");
     Assertions.assertEquals(Arrays.asList("id", "foo_after", "data", "foo_last"), columns);
 
-    sql("ALTER TABLE table_test.add_foo1 ADD COLUMNS foo_first String FIRST");
-    columns = getTableColumns("table_test.add_foo1");
+    sql("ALTER TABLE iceberg_rest_table_test.add_foo1 ADD COLUMNS foo_first String FIRST");
+    columns = getTableColumns("iceberg_rest_table_test.add_foo1");
     Assertions.assertEquals(
         Arrays.asList("foo_first", "id", "foo_after", "data", "foo_last"), columns);
   }
@@ -199,10 +294,11 @@ void testAddColumns() {
   @Test
   void testRenameColumns() {
     sql(
-        "CREATE TABLE table_test.renameC_foo1 (id bigint COMMENT 'unique id',data string) using iceberg");
-    sql("ALTER TABLE table_test.renameC_foo1 RENAME COLUMN data TO data1");
+        "CREATE TABLE iceberg_rest_table_test.renameC_foo1"
+            + " (id bigint COMMENT 'unique id',data string) using iceberg");
+    sql("ALTER TABLE iceberg_rest_table_test.renameC_foo1 RENAME COLUMN data TO data1");
 
-    Map<String, String> tableInfo = getTableInfo("table_test.renameC_foo1");
+    Map<String, String> tableInfo = getTableInfo("iceberg_rest_table_test.renameC_foo1");
     Map<String, String> m =
         ImmutableMap.of(
             "id", "bigint",
@@ -214,14 +310,15 @@
   @Test
   void testDropColumns() {
     sql(
-        "CREATE TABLE table_test.dropC_foo1 (id bigint COMMENT 'unique id',data string) using iceberg");
+        "CREATE TABLE iceberg_rest_table_test.dropC_foo1 "
+            + "(id bigint COMMENT 'unique id',data string) using iceberg");
 
     Assertions.assertThrowsExactly(
         AnalysisException.class,
-        () -> sql("ALTER TABLE table_test.dropC_foo1 DROP COLUMNS not_exits"));
+        () -> sql("ALTER TABLE iceberg_rest_table_test.dropC_foo1 DROP COLUMNS not_exits"));
 
-    sql("ALTER TABLE table_test.dropC_foo1 DROP COLUMNS data");
-    Map<String, String> tableInfo = getTableInfo("table_test.dropC_foo1");
+    sql("ALTER TABLE iceberg_rest_table_test.dropC_foo1 DROP COLUMNS data");
+    Map<String, String> tableInfo = getTableInfo("iceberg_rest_table_test.dropC_foo1");
     Map<String, String> m = ImmutableMap.of("id", "bigint");
     checkMapContains(m, tableInfo);
     Assertions.assertFalse(m.containsKey("data"));
@@ -230,13 +327,14 @@
   @Test
   void testUpdateColumnType() {
     sql(
-        "CREATE TABLE table_test.updateC_foo1 (id int COMMENT 'unique id',data string) using iceberg");
-    Map<String, String> tableInfo = getTableInfo("table_test.updateC_foo1");
+        "CREATE TABLE iceberg_rest_table_test.updateC_foo1 "
+            + "(id int COMMENT 'unique id',data string) using iceberg");
+    Map<String, String> tableInfo = getTableInfo("iceberg_rest_table_test.updateC_foo1");
     Map<String, String> m = ImmutableMap.of("id", "int");
     checkMapContains(m, tableInfo);
 
-    sql("ALTER TABLE table_test.updateC_foo1 ALTER COLUMN id TYPE bigint");
-    tableInfo = getTableInfo("table_test.updateC_foo1");
+    sql("ALTER TABLE iceberg_rest_table_test.updateC_foo1 ALTER COLUMN id TYPE bigint");
+    tableInfo = getTableInfo("iceberg_rest_table_test.updateC_foo1");
     m = ImmutableMap.of("id", "bigint");
     checkMapContains(m, tableInfo);
   }
@@ -244,27 +342,30 @@
   @Test
   void testUpdateColumnPosition() {
     sql(
-        "CREATE TABLE table_test.updateP_foo1 (id int COMMENT 'unique id',data string) using iceberg");
-    List<String> columns = getTableColumns("table_test.updateP_foo1");
+        "CREATE TABLE iceberg_rest_table_test.updateP_foo1 "
+            + "(id string COMMENT 'unique id',data string) using iceberg");
+    List<String> columns = getTableColumns("iceberg_rest_table_test.updateP_foo1");
     Assertions.assertEquals(Arrays.asList("id", "data"), columns);
 
-    sql("ALTER TABLE table_test.updateP_foo1 ALTER COLUMN id AFTER data");
-    columns = getTableColumns("table_test.updateP_foo1");
+    sql("ALTER TABLE iceberg_rest_table_test.updateP_foo1 ALTER COLUMN id AFTER data");
+    columns = getTableColumns("iceberg_rest_table_test.updateP_foo1");
     Assertions.assertEquals(Arrays.asList("data", "id"), columns);
 
-    sql("ALTER TABLE table_test.updateP_foo1 ALTER COLUMN id FIRST");
-    columns = getTableColumns("table_test.updateP_foo1");
+    sql("ALTER TABLE iceberg_rest_table_test.updateP_foo1 ALTER COLUMN id FIRST");
+    columns = getTableColumns("iceberg_rest_table_test.updateP_foo1");
     Assertions.assertEquals(Arrays.asList("id", "data"), columns);
   }
 
   @Test
   void testAlterPartitions() {
-    sql("CREATE TABLE table_test.part_foo1( id bigint, data string, ts timestamp) USING iceberg");
-    sql("ALTER TABLE table_test.part_foo1 ADD PARTITION FIELD bucket(16, id)");
-    sql("ALTER TABLE table_test.part_foo1 ADD PARTITION FIELD truncate(4, data)");
-    sql("ALTER TABLE table_test.part_foo1 ADD PARTITION FIELD years(ts)");
+    sql(
+        "CREATE TABLE iceberg_rest_table_test.part_foo1"
+            + "( id bigint, data string, ts timestamp) USING iceberg");
+    sql("ALTER TABLE iceberg_rest_table_test.part_foo1 ADD PARTITION FIELD bucket(16, id)");
+    sql("ALTER TABLE iceberg_rest_table_test.part_foo1 ADD PARTITION FIELD truncate(4, data)");
+    sql("ALTER TABLE iceberg_rest_table_test.part_foo1 ADD PARTITION FIELD years(ts)");
 
-    Map<String, String> tableInfo = getTableInfo("table_test.part_foo1");
+    Map<String, String> tableInfo = getTableInfo("iceberg_rest_table_test.part_foo1");
     Map<String, String> partitions =
         ImmutableMap.of(
             "Part 0", "bucket(16, id)",
@@ -275,9 +376,12 @@ void testAlterPartitions() {
 
     Assertions.assertThrowsExactly(
         IllegalArgumentException.class,
-        () -> sql("ALTER TABLE table_test.part_foo1 DROP PARTITION FIELD bucket(8, id)"));
-    sql("ALTER TABLE table_test.part_foo1 DROP PARTITION FIELD bucket(16, id)");
-    tableInfo = getTableInfo("table_test.part_foo1");
+        () ->
+            sql(
+                "ALTER TABLE iceberg_rest_table_test.part_foo1 "
+                    + "DROP PARTITION FIELD bucket(8, id)"));
+    sql("ALTER TABLE iceberg_rest_table_test.part_foo1 DROP PARTITION FIELD bucket(16, id)");
+    tableInfo = getTableInfo("iceberg_rest_table_test.part_foo1");
     partitions =
         ImmutableMap.of(
             "Part 0", "truncate(4, data)",
@@ -289,9 +393,12 @@ void testAlterPartitions() {
         IllegalArgumentException.class,
         () ->
             sql(
-                "ALTER TABLE table_test.part_foo1 REPLACE PARTITION FIELD months(ts) WITH days(ts)"));
-    sql("ALTER TABLE table_test.part_foo1 REPLACE PARTITION FIELD years(ts) WITH days(ts)");
-    tableInfo = getTableInfo("table_test.part_foo1");
+                "ALTER TABLE iceberg_rest_table_test.part_foo1 "
+                    + "REPLACE PARTITION FIELD months(ts) WITH days(ts)"));
+    sql(
+        "ALTER TABLE iceberg_rest_table_test.part_foo1 "
+            + "REPLACE PARTITION FIELD years(ts) WITH days(ts)");
+    tableInfo = getTableInfo("iceberg_rest_table_test.part_foo1");
     partitions =
         ImmutableMap.of(
             "Part 0", "truncate(4, data)",
@@ -302,30 +409,35 @@
   @Test
   void testAlterSortBy() {
-    sql("CREATE TABLE table_test.sort_foo1( id bigint, data string, ts timestamp) USING iceberg");
+    sql(
+        "CREATE TABLE iceberg_rest_table_test.sort_foo1"
+            + "( id bigint, data string, ts timestamp) USING iceberg");
     Assertions.assertThrowsExactly(
         ValidationException.class,
-        () -> sql("ALTER TABLE table_test.sort_foo1 WRITE ORDERED BY xx, id"));
+        () -> sql("ALTER TABLE iceberg_rest_table_test.sort_foo1 WRITE ORDERED BY xx, id"));
 
     sql(
-        "ALTER TABLE table_test.sort_foo1 WRITE ORDERED BY data ASC NULLS FIRST, id ASC NULLS FIRST");
-    Map<String, String> tableInfo = getTableInfo("table_test.sort_foo1");
+        "ALTER TABLE iceberg_rest_table_test.sort_foo1 "
+            + "WRITE ORDERED BY data ASC NULLS FIRST, id ASC NULLS FIRST");
+    Map<String, String> tableInfo = getTableInfo("iceberg_rest_table_test.sort_foo1");
     Assertions.assertTrue(
         tableInfo
             .get("Table Properties")
             .contains("sort-order=data ASC NULLS FIRST, id ASC NULLS FIRST,"));
 
     // replace with new one
ts ASC NULLS FIRST"); + tableInfo = getTableInfo("iceberg_rest_table_test.sort_foo1"); Assertions.assertTrue( tableInfo.get("Table Properties").contains("sort-order=ts ASC NULLS FIRST,")); } @Test void testAlterPartitionBy() { - sql("CREATE TABLE table_test.partby_foo1( id bigint, data string, ts timestamp) USING iceberg"); - sql("ALTER TABLE table_test.partby_foo1 WRITE DISTRIBUTED BY PARTITION"); - Map tableInfo = getTableInfo("table_test.partby_foo1"); + sql( + "CREATE TABLE iceberg_rest_table_test.partby_foo1" + + "( id bigint, data string, ts timestamp) USING iceberg"); + sql("ALTER TABLE iceberg_rest_table_test.partby_foo1 WRITE DISTRIBUTED BY PARTITION"); + Map tableInfo = getTableInfo("iceberg_rest_table_test.partby_foo1"); Assertions.assertTrue( tableInfo.get("Table Properties").contains("write.distribution-mode=hash")); } @@ -333,34 +445,48 @@ void testAlterPartitionBy() { @Test void testAlterIdentifier() { sql( - "CREATE TABLE table_test.identifier_foo1( id bigint NOT NULL, data string, ts timestamp) USING iceberg"); - sql("ALTER TABLE table_test.identifier_foo1 SET IDENTIFIER FIELDS id"); - Map tableInfo = getTableInfo("table_test.identifier_foo1"); + "CREATE TABLE iceberg_rest_table_test.identifier_foo1" + + "( id bigint NOT NULL, data string, ts timestamp) USING iceberg"); + sql("ALTER TABLE iceberg_rest_table_test.identifier_foo1 SET IDENTIFIER FIELDS id"); + Map tableInfo = getTableInfo("iceberg_rest_table_test.identifier_foo1"); Assertions.assertTrue(tableInfo.get("Table Properties").contains("identifier-fields=[id]")); - sql("ALTER TABLE table_test.identifier_foo1 DROP IDENTIFIER FIELDS id"); - tableInfo = getTableInfo("table_test.identifier_foo1"); + sql("ALTER TABLE iceberg_rest_table_test.identifier_foo1 DROP IDENTIFIER FIELDS id"); + tableInfo = getTableInfo("iceberg_rest_table_test.identifier_foo1"); Assertions.assertFalse(tableInfo.get("Table Properties").contains("identifier-fields")); // java.lang.IllegalArgumentException: Cannot add field id as an identifier field: not a // required field Assertions.assertThrowsExactly( IllegalArgumentException.class, - () -> sql("ALTER TABLE table_test.identifier_foo1 SET IDENTIFIER FIELDS data")); + () -> + sql("ALTER TABLE iceberg_rest_table_test.identifier_foo1 SET IDENTIFIER FIELDS data")); } @Test - // todo: MemoryCatalog doesn't support snapshot operations, will be supported after hive catalog - // is merged + // MemoryCatalog doesn't support snapshot operations, error is: + // org.apache.iceberg.exceptions.NotFoundException: File does not exist: + // /tmp/iceberg_rest_table_test/snapshot_foo1/metadata/00002-c7516f8e-ef6b-406a-8d78-9dda825dd762.metadata.json + // sql("SELECT * FROM table_test.snapshot_foo1.snapshots"); + @EnabledIf("catalogTypeNotMemory") void testSnapshot() { sql( - "CREATE TABLE table_test.snapshot_foo1 (id bigint COMMENT 'unique id',data string) using iceberg"); - sql(" INSERT INTO table_test.snapshot_foo1 VALUES (1, 'a'), (2, 'b');"); - sql(" INSERT INTO table_test.snapshot_foo1 VALUES (3, 'c'), (4, 'd');"); - printObjects(sql("desc table_test.snapshot_foo1")); - - // org.apache.iceberg.exceptions.NotFoundException: File does not exist: - // /tmp/table_test/snapshot_foo1/metadata/00002-c7516f8e-ef6b-406a-8d78-9dda825dd762.metadata.json - // printObjects(sql("SELECT * FROM table_test.snapshot_foo1.snapshots")); + "CREATE TABLE iceberg_rest_table_test.snapshot_foo1 " + + "(id bigint COMMENT 'unique id',data string) using iceberg"); + sql(" INSERT INTO iceberg_rest_table_test.snapshot_foo1 VALUES (1, 
'a'), (2, 'b');"); + sql(" INSERT INTO iceberg_rest_table_test.snapshot_foo1 VALUES (3, 'c'), (4, 'd');"); + List snapshots = + convertToStringList( + sql("SELECT * FROM iceberg_rest_table_test.snapshot_foo1.snapshots"), 1); + + Assertions.assertEquals(2, snapshots.size()); + String oldSnapshotId = snapshots.get(0); + sql( + String.format( + "CALL rest.system.rollback_to_snapshot('iceberg_rest_table_test.snapshot_foo1', %s)", + oldSnapshotId)); + Map result = + convertToStringMap(sql("select * from iceberg_rest_table_test.snapshot_foo1")); + Assertions.assertEquals(ImmutableMap.of("1", "a", "2", "b"), result); } } diff --git a/integration-test/src/test/java/com/datastrato/graviton/integration/test/util/AbstractIT.java b/integration-test/src/test/java/com/datastrato/graviton/integration/test/util/AbstractIT.java index 70e059eecf0..abadd975a54 100644 --- a/integration-test/src/test/java/com/datastrato/graviton/integration/test/util/AbstractIT.java +++ b/integration-test/src/test/java/com/datastrato/graviton/integration/test/util/AbstractIT.java @@ -10,10 +10,16 @@ import com.datastrato.graviton.Config; import com.datastrato.graviton.client.GravitonClient; import com.datastrato.graviton.integration.test.MiniGraviton; +import com.datastrato.graviton.integration.test.MiniGravitonContext; import com.datastrato.graviton.server.GravitonServer; import com.datastrato.graviton.server.ServerConfig; import com.datastrato.graviton.server.web.JettyServerConfig; import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.HashMap; +import java.util.Map; import org.apache.commons.io.FileUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -27,27 +33,48 @@ public class AbstractIT { protected static GravitonClient client; private static MiniGraviton miniGraviton; - - private static final String TEST_MODE = "testMode"; - private static final String EMBEDDED_TEST_MODE = "embedded"; - protected static Config serverConfig; static String testMode = ""; + protected static Map customConfigs = new HashMap<>(); + + public static void registerCustomConfigs(Map configs) { + customConfigs.putAll(configs); + } + + private static void rewriteGravitonServerConfig() throws IOException { + if (customConfigs.isEmpty()) return; + + String gravitonHome = System.getenv("GRAVITON_HOME"); + + String tmpFileName = GravitonServer.CONF_FILE + ".tmp"; + Path tmpPath = Paths.get(gravitonHome, "conf", tmpFileName); + Files.deleteIfExists(tmpPath); + + Path configPath = Paths.get(gravitonHome, "conf", GravitonServer.CONF_FILE); + Files.move(configPath, tmpPath); + + ITUtils.rewriteConfigFile(tmpPath.toString(), configPath.toString(), customConfigs); + } + @BeforeAll public static void startIntegrationTest() throws Exception { testMode = - System.getProperty(TEST_MODE) == null ? EMBEDDED_TEST_MODE : System.getProperty(TEST_MODE); + System.getProperty(ITUtils.TEST_MODE) == null + ? 
+    ITUtils.rewriteConfigFile(tmpPath.toString(), configPath.toString(), customConfigs);
+  }
+
   @BeforeAll
   public static void startIntegrationTest() throws Exception {
     testMode =
-        System.getProperty(TEST_MODE) == null ? EMBEDDED_TEST_MODE : System.getProperty(TEST_MODE);
+        System.getProperty(ITUtils.TEST_MODE) == null
+            ? ITUtils.EMBEDDED_TEST_MODE
+            : System.getProperty(ITUtils.TEST_MODE);
     LOG.info("Running Graviton Server in {} mode", testMode);
 
     serverConfig = new ServerConfig();
-    if (testMode != null && testMode.equals(EMBEDDED_TEST_MODE)) {
-      miniGraviton = new MiniGraviton();
+    if (testMode != null && testMode.equals(ITUtils.EMBEDDED_TEST_MODE)) {
+      MiniGravitonContext context = new MiniGravitonContext(customConfigs);
+      miniGraviton = new MiniGraviton(context);
       miniGraviton.start();
       serverConfig = miniGraviton.getServerConfig();
     } else {
+      rewriteGravitonServerConfig();
       serverConfig.loadFromFile(GravitonServer.CONF_FILE);
 
       try {
@@ -72,7 +99,7 @@ public static void stopIntegrationTest() throws IOException, InterruptedExceptio
     if (client != null) {
       client.close();
     }
-    if (testMode != null && testMode.equals(EMBEDDED_TEST_MODE)) {
+    if (testMode != null && testMode.equals(ITUtils.EMBEDDED_TEST_MODE)) {
       miniGraviton.stop();
     } else {
       GravitonITUtils.stopGravitonServer();