[#411] test: support E2E tests for IcebergREST server
kiki authored and kiki committed Sep 21, 2023
1 parent 659449e commit 6d98e1e
Showing 8 changed files with 575 additions and 5 deletions.
23 changes: 23 additions & 0 deletions catalog-lakehouse/build.gradle.kts
@@ -34,6 +34,29 @@ dependencies {
exclude("org.slf4j")
}

implementation(libs.iceberg.hive.metastore)

implementation(libs.hive2.metastore) {
exclude("org.apache.avro", "avro")
exclude("org.slf4j", "slf4j-log4j12")
exclude("org.pentaho") // missing dependency
exclude("org.apache.hbase")
exclude("org.apache.logging.log4j")
exclude("co.cask.tephra")
exclude("com.google.code.findbugs", "jsr305")
exclude("org.eclipse.jetty.aggregate","jetty-all")
exclude("org.eclipse.jetty.orbit","javax.servlet")
exclude("org.apache.parquet", "parquet-hadoop-bundle")
exclude("com.tdunning", "json")
exclude("javax.transaction", "transaction-api")
exclude("com.zaxxer","HikariCP")
}

implementation(libs.hadoop2.client) {
exclude("org.apache.avro", "avro")
exclude("org.slf4j", "slf4j-log4j12")
}

compileOnly(libs.lombok)
annotationProcessor(libs.lombok)

@@ -40,7 +40,7 @@ private Server initServer(IcebergRESTConfig restConfig) {
config.register(IcebergObjectMapperProvider.class).register(JacksonFeature.class);
config.register(IcebergExceptionMapper.class);

- IcebergTableOps icebergTableOps = new IcebergTableOps();
+ IcebergTableOps icebergTableOps = new IcebergTableOps(restConfig);
config.register(
new AbstractBinder() {
@Override
@@ -16,4 +16,11 @@ public class IcebergRESTConfig extends Config {
.version("0.1.0")
.intConf()
.createWithDefault(9001);

public static final ConfigEntry<String> CATALOG_IMPL =
new ConfigBuilder("catalogImpl")
.doc("Choose the implementation of the iceberg catalog")
.version("0.1.0")
.stringConf()
.createWithDefault("memory");
}
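
A minimal read-side sketch of the new entry (not part of the commit: it assumes IcebergRESTConfig offers a usable no-arg constructor, while CATALOG_IMPL and its "memory" default are taken from the diff above):

    // Sketch only: the entry falls back to "memory" when "catalogImpl" is unset,
    // so existing deployments keep the in-memory catalog unless they opt in.
    IcebergRESTConfig restConfig = new IcebergRESTConfig(); // assumed constructor
    String catalogImpl = restConfig.get(IcebergRESTConfig.CATALOG_IMPL); // "memory" by default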
@@ -4,6 +4,7 @@
*/
package com.datastrato.graviton.catalog.lakehouse.iceberg.ops;

import com.datastrato.graviton.catalog.lakehouse.iceberg.IcebergRESTConfig;
import com.datastrato.graviton.catalog.lakehouse.iceberg.ops.IcebergTableOpsHelper.IcebergTableChange;
import com.datastrato.graviton.catalog.lakehouse.iceberg.utils.IcebergCatalogUtil;
import com.google.common.base.Preconditions;
@@ -35,10 +36,19 @@ public class IcebergTableOps {

protected Catalog catalog;
private SupportsNamespaces asNamespaceCatalog;
- private final String DEFAULT_ICEBERG_CATALOG_TYPE = "memory";

public IcebergTableOps(IcebergRESTConfig icebergRESTConfig) {
catalog =
IcebergCatalogUtil.loadIcebergCatalog(
icebergRESTConfig.get(IcebergRESTConfig.CATALOG_IMPL));
if (catalog instanceof SupportsNamespaces) {
asNamespaceCatalog = (SupportsNamespaces) catalog;
}
}

public IcebergTableOps() {
- catalog = IcebergCatalogUtil.loadIcebergCatalog(DEFAULT_ICEBERG_CATALOG_TYPE);
+ catalog =
+     IcebergCatalogUtil.loadIcebergCatalog(IcebergRESTConfig.CATALOG_IMPL.getDefaultValue());
if (catalog instanceof SupportsNamespaces) {
asNamespaceCatalog = (SupportsNamespaces) catalog;
}
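
For context, a hedged sketch of how the two constructors now differ (not from the commit; the signatures follow the diff, and the config instance is assumed to be the one the auxiliary service was started with):

    // New path: the catalog implementation is driven by the service configuration,
    // so switching from "memory" to "hive" or "jdbc" is a config change only.
    IcebergRESTConfig restConfig = new IcebergRESTConfig(); // assumed constructor
    IcebergTableOps configured = new IcebergTableOps(restConfig);

    // Old path: still available; it now reads CATALOG_IMPL's default ("memory")
    // instead of the removed DEFAULT_ICEBERG_CATALOG_TYPE constant.
    IcebergTableOps inMemory = new IcebergTableOps();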
@@ -8,7 +8,9 @@
import java.util.Locale;
import java.util.Map;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.inmemory.InMemoryCatalog;
import org.apache.iceberg.jdbc.JdbcCatalog;

public class IcebergCatalogUtil {

@@ -20,11 +22,28 @@ private static InMemoryCatalog loadMemoryCatalog() {
return memoryCatalog;
}

private static HiveCatalog loadHiveCatalog() {
HiveCatalog hiveCatalog = new HiveCatalog();
Map<String, String> properties = new HashMap<>();
hiveCatalog.initialize("hive", properties);
return hiveCatalog;
}

private static JdbcCatalog loadJdbcCatalog() {
JdbcCatalog jdbcCatalog = new JdbcCatalog();
Map<String, String> properties = new HashMap<>();
jdbcCatalog.initialize("jdbc", properties);
return jdbcCatalog;
}

public static Catalog loadIcebergCatalog(String catalogType) {
switch (catalogType.toLowerCase(Locale.ENGLISH)) {
case "memory":
return loadMemoryCatalog();
- // todo: add hive, jdbc catalog
case "hive":
return loadHiveCatalog();
case "jdbc":
return loadJdbcCatalog();
default:
throw new RuntimeException(
catalogType
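A hedged usage sketch of the extended factory (the three supported names and the case-insensitive lookup come from the switch above; since loadHiveCatalog and loadJdbcCatalog initialize with empty properties, a reachable metastore or JDBC backend configured elsewhere, e.g. via hive-site.xml, is assumed):

    // "memory" -> InMemoryCatalog, "hive" -> HiveCatalog, "jdbc" -> JdbcCatalog.
    Catalog inMemory = IcebergCatalogUtil.loadIcebergCatalog("memory");
    Catalog hive = IcebergCatalogUtil.loadIcebergCatalog("HIVE"); // lookup is case-insensitive
    // Any other name falls through to the default branch and throws a RuntimeException.
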
3 changes: 3 additions & 0 deletions gradle/libs.versions.toml
@@ -68,6 +68,7 @@ hive2-metastore = { group = "org.apache.hive", name = "hive-metastore", version.ref = "hive2"}
hive2-exec = { group = "org.apache.hive", name = "hive-exec", version.ref = "hive2"}
hive2-common = { group = "org.apache.hive", name = "hive-common", version.ref = "hive2"}
hadoop2-common = { group = "org.apache.hadoop", name = "hadoop-common", version.ref = "hadoop2"}
hadoop2-client = { group = "org.apache.hadoop", name = "hadoop-client", version.ref = "hadoop2" }
hadoop2-mapreduce-client-core = { group = "org.apache.hadoop", name = "hadoop-mapreduce-client-core", version.ref = "hadoop2"}
airlift-units = { group = "io.airlift", name = "units", version.ref = "airlift-units"}
airlift-log = { group = "io.airlift", name = "log", version.ref = "airlift-log"}
@@ -80,6 +81,8 @@ caffeine = { group = "com.github.ben-manes.caffeine", name = "caffeine", version
rocksdbjni = { group = "org.rocksdb", name = "rocksdbjni", version.ref = "rocksdbjni" }
commons-collections4 = { group = "org.apache.commons", name = "commons-collections4", version.ref = "commons-collections4" }
iceberg-core = { group = "org.apache.iceberg", name = "iceberg-core", version.ref = "iceberg" }
iceberg-hive-metastore = { group = "org.apache.iceberg", name = "iceberg-hive-metastore", version.ref = "iceberg" }
iceberg-spark-runtime = { group = "org.apache.iceberg", name = "iceberg-spark-runtime-3.3_2.13", version.ref = "iceberg" }

[bundles]
log4j = ["slf4j-api", "log4j-slf4j2-impl", "log4j-api", "log4j-core", "log4j-12-api"]
@@ -172,7 +172,12 @@ private void customizeConfigFile(String configTempFileName, String configFileName
+ "."
+ IcebergRESTConfig.ICEBERG_REST_SERVER_HTTP_PORT.getKey(),
String.valueOf(RESTUtils.findAvailablePort(3000, 4000)));

configMap.put(
AuxiliaryServiceManager.GRAVITON_AUX_SERVICE_PREFIX
+ IcebergAuxiliaryService.SERVICE_NAME
+ "."
+ IcebergRESTConfig.CATALOG_IMPL.getKey(),
"hive");
Properties props = new Properties();

try (InputStream inputStream = Files.newInputStream(Paths.get(configTempFileName));