From 5a88d1522877afafc4e8e6b3d98a4a00e332e47f Mon Sep 17 00:00:00 2001 From: Mohit Yadav <105265192+mohityadav766@users.noreply.github.com> Date: Sun, 7 Apr 2024 02:21:56 +0530 Subject: [PATCH] [Feature] Import/Export For Table, DatabaseSchema, Databases, Service (#15816) * - Add Import Export Separation for GlossaryTerms * - Fixed Table Resrouce Test * - Review Comment #2 * - GlossaryTestFix, Glossary does not allow Tier Tags * - Database Schema Tests Fix * - Create Database, DatabaseSchema, DatabaseService import entity if not exists * - Fix Test for Database DatabaseSchema, Table --- .../java/org/openmetadata/csv/CsvUtil.java | 40 +++++- .../java/org/openmetadata/csv/EntityCsv.java | 28 +++-- .../service/jdbi3/DatabaseRepository.java | 40 ++++-- .../jdbi3/DatabaseSchemaRepository.java | 39 ++++-- .../jdbi3/DatabaseServiceRepository.java | 115 ++++++++++++++++++ .../service/jdbi3/GlossaryRepository.java | 6 +- .../service/jdbi3/TableRepository.java | 115 +++++++++++++++--- .../database/DatabaseServiceResource.java | 60 +++++++++ .../database/databaseCsvDocumentation.json | 18 +++ .../databaseSchemaCsvDocumentation.json | 18 +++ .../databaseServiceCsvDocumentation.json | 73 +++++++++++ .../data/table/tableCsvDocumentation.json | 52 ++++++++ .../databases/DatabaseResourceTest.java | 28 +++-- .../databases/DatabaseSchemaResourceTest.java | 23 +++- .../databases/TableResourceTest.java | 31 +++-- .../glossary/GlossaryResourceTest.java | 10 +- 16 files changed, 615 insertions(+), 81 deletions(-) create mode 100644 openmetadata-service/src/main/resources/json/data/databaseService/databaseServiceCsvDocumentation.json diff --git a/openmetadata-service/src/main/java/org/openmetadata/csv/CsvUtil.java b/openmetadata-service/src/main/java/org/openmetadata/csv/CsvUtil.java index a3bc67659ccd..c6fd9fe19ce8 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/csv/CsvUtil.java +++ b/openmetadata-service/src/main/java/org/openmetadata/csv/CsvUtil.java @@ -132,7 +132,45 @@ public static List addTagLabels(List csvRecord, List t csvRecord.add( nullOrEmpty(tags) ? null - : tags.stream().map(TagLabel::getTagFQN).collect(Collectors.joining(FIELD_SEPARATOR))); + : tags.stream() + .filter( + tagLabel -> + tagLabel.getSource().equals(TagLabel.TagSource.CLASSIFICATION) + && !tagLabel.getTagFQN().split("\\.")[0].equals("Tier") + && !tagLabel.getLabelType().equals(TagLabel.LabelType.DERIVED)) + .map(TagLabel::getTagFQN) + .collect(Collectors.joining(FIELD_SEPARATOR))); + + return csvRecord; + } + + public static List addGlossaryTerms(List csvRecord, List tags) { + csvRecord.add( + nullOrEmpty(tags) + ? null + : tags.stream() + .filter( + tagLabel -> + tagLabel.getSource().equals(TagLabel.TagSource.GLOSSARY) + && !tagLabel.getTagFQN().split("\\.")[0].equals("Tier")) + .map(TagLabel::getTagFQN) + .collect(Collectors.joining(FIELD_SEPARATOR))); + + return csvRecord; + } + + public static List addTagTiers(List csvRecord, List tags) { + csvRecord.add( + nullOrEmpty(tags) + ? 
null + : tags.stream() + .filter( + tagLabel -> + tagLabel.getSource().equals(TagLabel.TagSource.CLASSIFICATION) + && tagLabel.getTagFQN().split("\\.")[0].equals("Tier")) + .map(TagLabel::getTagFQN) + .collect(Collectors.joining(FIELD_SEPARATOR))); + return csvRecord; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/csv/EntityCsv.java b/openmetadata-service/src/main/java/org/openmetadata/csv/EntityCsv.java index 7454e9139b06..fe31b7e4c1ec 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/csv/EntityCsv.java +++ b/openmetadata-service/src/main/java/org/openmetadata/csv/EntityCsv.java @@ -34,6 +34,7 @@ import org.apache.commons.csv.CSVFormat.Builder; import org.apache.commons.csv.CSVPrinter; import org.apache.commons.csv.CSVRecord; +import org.apache.commons.lang3.tuple.Pair; import org.jdbi.v3.sqlobject.transaction.Transaction; import org.openmetadata.common.utils.CommonUtil; import org.openmetadata.schema.EntityInterface; @@ -275,20 +276,26 @@ protected final List getEntityReferences( } protected final List getTagLabels( - CSVPrinter printer, CSVRecord csvRecord, int fieldNumber) throws IOException { + CSVPrinter printer, + CSVRecord csvRecord, + List> fieldNumbersWithSource) + throws IOException { if (!processRecord) { return null; } - List refs = getEntityReferences(printer, csvRecord, fieldNumber, Entity.TAG); - if (!processRecord || nullOrEmpty(refs)) { - return null; - } List tagLabels = new ArrayList<>(); - for (EntityReference ref : refs) { - tagLabels.add( - new TagLabel() - .withSource(TagSource.CLASSIFICATION) - .withTagFQN(ref.getFullyQualifiedName())); + for (Pair pair : fieldNumbersWithSource) { + int fieldNumbers = pair.getLeft(); + TagSource source = pair.getRight(); + List refs = + source == TagSource.CLASSIFICATION + ? 
getEntityReferences(printer, csvRecord, fieldNumbers, Entity.TAG) + : getEntityReferences(printer, csvRecord, fieldNumbers, Entity.GLOSSARY_TERM); + if (processRecord && !nullOrEmpty(refs)) { + for (EntityReference ref : refs) { + tagLabels.add(new TagLabel().withSource(source).withTagFQN(ref.getFullyQualifiedName())); + } + } } return tagLabels; } @@ -391,6 +398,7 @@ protected void createEntity(CSVPrinter resultsPrinter, CSVRecord csvRecord, T en responseStatus = response.getStatus(); } catch (Exception ex) { importFailure(resultsPrinter, ex.getMessage(), csvRecord); + importResult.setStatus(ApiStatus.FAILURE); return; } } else { // Dry run don't create the entity diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java index 10a801f17c62..23845de329fa 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java @@ -14,8 +14,10 @@ package org.openmetadata.service.jdbi3; import static org.openmetadata.csv.CsvUtil.addField; +import static org.openmetadata.csv.CsvUtil.addGlossaryTerms; import static org.openmetadata.csv.CsvUtil.addOwner; import static org.openmetadata.csv.CsvUtil.addTagLabels; +import static org.openmetadata.csv.CsvUtil.addTagTiers; import static org.openmetadata.service.Entity.DATABASE_SCHEMA; import java.io.IOException; @@ -26,6 +28,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.csv.CSVPrinter; import org.apache.commons.csv.CSVRecord; +import org.apache.commons.lang3.tuple.Pair; import org.jdbi.v3.sqlobject.transaction.Transaction; import org.openmetadata.csv.EntityCsv; import org.openmetadata.schema.EntityInterface; @@ -36,6 +39,7 @@ import org.openmetadata.schema.type.EntityReference; import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.Relationship; +import org.openmetadata.schema.type.TagLabel; import org.openmetadata.schema.type.csv.CsvDocumentation; import org.openmetadata.schema.type.csv.CsvFile; import org.openmetadata.schema.type.csv.CsvHeader; @@ -116,7 +120,12 @@ public String exportToCsv(String name, String user) throws IOException { @Override public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user) throws IOException { - Database database = getByName(null, name, Fields.EMPTY_FIELDS); // Validate glossary name + Database database = + getByName( + null, + name, + getFields( + "service")); // Validate glossary name, and get service needed in case of create DatabaseCsv databaseCsv = new DatabaseCsv(database, user); return databaseCsv.importCsv(csv, dryRun); } @@ -234,22 +243,33 @@ protected void createEntity(CSVPrinter printer, List csvRecords) thro try { schema = Entity.getEntityByName(DATABASE_SCHEMA, schemaFqn, "*", Include.NON_DELETED); } catch (Exception ex) { - importFailure(printer, entityNotFound(0, DATABASE_SCHEMA, schemaFqn), csvRecord); - processRecord = false; - return; + LOG.warn("Database Schema not found: {}, it will be created with Import.", schemaFqn); + schema = + new DatabaseSchema() + .withDatabase(database.getEntityReference()) + .withService(database.getService()); } - // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain + // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers retentionPeriod, + // sourceUrl, domain // Field 
1,2,3,6,7 - database schema name, displayName, description + List tagLabels = + getTagLabels( + printer, + csvRecord, + List.of( + Pair.of(4, TagLabel.TagSource.CLASSIFICATION), + Pair.of(5, TagLabel.TagSource.GLOSSARY), + Pair.of(6, TagLabel.TagSource.CLASSIFICATION))); schema .withName(csvRecord.get(0)) .withDisplayName(csvRecord.get(1)) .withDescription(csvRecord.get(2)) .withOwner(getOwner(printer, csvRecord, 3)) - .withTags(getTagLabels(printer, csvRecord, 4)) - .withRetentionPeriod(csvRecord.get(5)) - .withSourceUrl(csvRecord.get(6)) - .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN)); + .withTags(tagLabels) + .withRetentionPeriod(csvRecord.get(7)) + .withSourceUrl(csvRecord.get(8)) + .withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN)); if (processRecord) { createEntity(printer, csvRecord, schema); } @@ -264,6 +284,8 @@ protected void addRecord(CsvFile csvFile, DatabaseSchema entity) { addField(recordList, entity.getDescription()); addOwner(recordList, entity.getOwner()); addTagLabels(recordList, entity.getTags()); + addGlossaryTerms(recordList, entity.getTags()); + addTagTiers(recordList, entity.getTags()); addField(recordList, entity.getRetentionPeriod()); addField(recordList, entity.getSourceUrl()); String domain = diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java index a48800b3f2e0..73653639b645 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java @@ -14,8 +14,10 @@ package org.openmetadata.service.jdbi3; import static org.openmetadata.csv.CsvUtil.addField; +import static org.openmetadata.csv.CsvUtil.addGlossaryTerms; import static org.openmetadata.csv.CsvUtil.addOwner; import static org.openmetadata.csv.CsvUtil.addTagLabels; +import static org.openmetadata.csv.CsvUtil.addTagTiers; import static org.openmetadata.schema.type.Include.ALL; import static org.openmetadata.service.Entity.DATABASE_SCHEMA; import static org.openmetadata.service.Entity.TABLE; @@ -29,6 +31,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.csv.CSVPrinter; import org.apache.commons.csv.CSVRecord; +import org.apache.commons.lang3.tuple.Pair; import org.jdbi.v3.sqlobject.transaction.Transaction; import org.openmetadata.csv.EntityCsv; import org.openmetadata.schema.EntityInterface; @@ -39,6 +42,7 @@ import org.openmetadata.schema.type.EntityReference; import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.Relationship; +import org.openmetadata.schema.type.TagLabel; import org.openmetadata.schema.type.csv.CsvDocumentation; import org.openmetadata.schema.type.csv.CsvFile; import org.openmetadata.schema.type.csv.CsvHeader; @@ -188,7 +192,8 @@ public String exportToCsv(String name, String user) throws IOException { @Override public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user) throws IOException { - DatabaseSchema schema = getByName(null, name, Fields.EMPTY_FIELDS); // Validate database schema + DatabaseSchema schema = + getByName(null, name, getFields("database,service")); // Validate database schema return new DatabaseSchemaCsv(schema, user).importCsv(csv, dryRun); } @@ -266,21 +271,35 @@ protected void createEntity(CSVPrinter printer, List csvRecords) thro try { table = 
Entity.getEntityByName(TABLE, tableFqn, "*", Include.NON_DELETED); } catch (Exception ex) { - importFailure(printer, entityNotFound(0, TABLE, tableFqn), csvRecord); - processRecord = false; - return; + LOG.warn("Table not found: {}, it will be created with Import.", tableFqn); + table = + new Table() + .withService(schema.getService()) + .withDatabase(schema.getDatabase()) + .withDatabaseSchema(schema.getEntityReference()); } - // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain + // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers retentionPeriod, + // sourceUrl, domain // Field 1,2,3,6,7 - database schema name, displayName, description + List tagLabels = + getTagLabels( + printer, + csvRecord, + List.of( + Pair.of(4, TagLabel.TagSource.CLASSIFICATION), + Pair.of(5, TagLabel.TagSource.GLOSSARY), + Pair.of(6, TagLabel.TagSource.CLASSIFICATION))); table + .withName(csvRecord.get(0)) .withDisplayName(csvRecord.get(1)) .withDescription(csvRecord.get(2)) .withOwner(getOwner(printer, csvRecord, 3)) - .withTags(getTagLabels(printer, csvRecord, 4)) - .withRetentionPeriod(csvRecord.get(5)) - .withSourceUrl(csvRecord.get(6)) - .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN)); + .withTags(tagLabels) + .withRetentionPeriod(csvRecord.get(7)) + .withSourceUrl(csvRecord.get(8)) + .withColumns(new ArrayList<>()) + .withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN)); if (processRecord) { createEntity(printer, csvRecord, table); @@ -296,6 +315,8 @@ protected void addRecord(CsvFile csvFile, Table entity) { addField(recordList, entity.getDescription()); addOwner(recordList, entity.getOwner()); addTagLabels(recordList, entity.getTags()); + addGlossaryTerms(recordList, entity.getTags()); + addTagTiers(recordList, entity.getTags()); addField(recordList, entity.getRetentionPeriod()); addField(recordList, entity.getSourceUrl()); String domain = diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseServiceRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseServiceRepository.java index e2f05dfef4c6..9fb10b2ce177 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseServiceRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseServiceRepository.java @@ -13,12 +13,39 @@ package org.openmetadata.service.jdbi3; +import static org.openmetadata.csv.CsvUtil.addField; +import static org.openmetadata.csv.CsvUtil.addGlossaryTerms; +import static org.openmetadata.csv.CsvUtil.addOwner; +import static org.openmetadata.csv.CsvUtil.addTagLabels; +import static org.openmetadata.csv.CsvUtil.addTagTiers; +import static org.openmetadata.service.Entity.DATABASE; +import static org.openmetadata.service.Entity.DATABASE_SERVICE; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.csv.CSVPrinter; +import org.apache.commons.csv.CSVRecord; +import org.apache.commons.lang3.tuple.Pair; +import org.openmetadata.csv.EntityCsv; +import org.openmetadata.schema.EntityInterface; import org.openmetadata.schema.api.services.DatabaseConnection; +import org.openmetadata.schema.entity.data.Database; import org.openmetadata.schema.entity.services.DatabaseService; import org.openmetadata.schema.entity.services.ServiceType; +import org.openmetadata.schema.type.Include; +import 
org.openmetadata.schema.type.TagLabel; +import org.openmetadata.schema.type.csv.CsvDocumentation; +import org.openmetadata.schema.type.csv.CsvFile; +import org.openmetadata.schema.type.csv.CsvHeader; +import org.openmetadata.schema.type.csv.CsvImportResult; import org.openmetadata.service.Entity; +import org.openmetadata.service.exception.EntityNotFoundException; import org.openmetadata.service.resources.services.database.DatabaseServiceResource; +import org.openmetadata.service.util.EntityUtil; +import org.openmetadata.service.util.FullyQualifiedName; @Slf4j public class DatabaseServiceRepository @@ -33,4 +60,92 @@ public DatabaseServiceRepository() { ServiceType.DATABASE); supportsSearch = true; } + + @Override + public String exportToCsv(String name, String user) throws IOException { + DatabaseService databaseService = + getByName(null, name, EntityUtil.Fields.EMPTY_FIELDS); // Validate database name + DatabaseRepository repository = (DatabaseRepository) Entity.getEntityRepository(DATABASE); + ListFilter filter = new ListFilter(Include.NON_DELETED).addQueryParam("service", name); + List databases = + repository.listAll(repository.getFields("owner,tags,domain"), filter); + databases.sort(Comparator.comparing(EntityInterface::getFullyQualifiedName)); + return new DatabaseServiceCsv(databaseService, user).exportCsv(databases); + } + + @Override + public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user) + throws IOException { + // Validate database service + DatabaseService databaseService = + getByName(null, name, EntityUtil.Fields.EMPTY_FIELDS); // Validate glossary name + DatabaseServiceCsv databaseServiceCsv = new DatabaseServiceCsv(databaseService, user); + return databaseServiceCsv.importCsv(csv, dryRun); + } + + public static class DatabaseServiceCsv extends EntityCsv { + public static final CsvDocumentation DOCUMENTATION = getCsvDocumentation(DATABASE_SERVICE); + public static final List HEADERS = DOCUMENTATION.getHeaders(); + private final DatabaseService service; + + DatabaseServiceCsv(DatabaseService service, String user) { + super(DATABASE, DOCUMENTATION.getHeaders(), user); + this.service = service; + } + + @Override + protected void createEntity(CSVPrinter printer, List csvRecords) throws IOException { + CSVRecord csvRecord = getNextRecord(printer, csvRecords); + String databaseFqn = + FullyQualifiedName.add(service.getFullyQualifiedName(), csvRecord.get(0)); + Database database; + try { + database = Entity.getEntityByName(DATABASE, databaseFqn, "*", Include.NON_DELETED); + } catch (EntityNotFoundException ex) { + LOG.warn("Database not found: {}, it will be created with Import.", databaseFqn); + database = new Database().withService(service.getEntityReference()); + } + + // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, domain + // Field 1,2,3,6,7 - database service name, displayName, description + List tagLabels = + getTagLabels( + printer, + csvRecord, + List.of( + Pair.of(4, TagLabel.TagSource.CLASSIFICATION), + Pair.of(5, TagLabel.TagSource.GLOSSARY), + Pair.of(6, TagLabel.TagSource.CLASSIFICATION))); + database + .withName(csvRecord.get(0)) + .withDisplayName(csvRecord.get(1)) + .withDescription(csvRecord.get(2)) + .withOwner(getOwner(printer, csvRecord, 3)) + .withTags(tagLabels) + .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN)); + + if (processRecord) { + createEntity(printer, csvRecord, database); + } + } + + @Override + protected void addRecord(CsvFile csvFile, Database entity) { + 
// Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, domain + List recordList = new ArrayList<>(); + addField(recordList, entity.getName()); + addField(recordList, entity.getDisplayName()); + addField(recordList, entity.getDescription()); + addOwner(recordList, entity.getOwner()); + addTagLabels(recordList, entity.getTags()); + addGlossaryTerms(recordList, entity.getTags()); + addTagTiers(recordList, entity.getTags()); + String domain = + entity.getDomain() == null || Boolean.TRUE.equals(entity.getDomain().getInherited()) + ? "" + : entity.getDomain().getFullyQualifiedName(); + addField(recordList, domain); + addRecord(csvFile, recordList); + } + } } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryRepository.java index b6ba62393f95..cb33c1a7e7c8 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryRepository.java @@ -38,6 +38,7 @@ import org.apache.commons.csv.CSVPrinter; import org.apache.commons.csv.CSVRecord; import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; import org.jdbi.v3.sqlobject.transaction.Transaction; import org.openmetadata.csv.CsvUtil; import org.openmetadata.csv.EntityCsv; @@ -50,6 +51,7 @@ import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.ProviderType; import org.openmetadata.schema.type.Relationship; +import org.openmetadata.schema.type.TagLabel; import org.openmetadata.schema.type.TagLabel.TagSource; import org.openmetadata.schema.type.csv.CsvDocumentation; import org.openmetadata.schema.type.csv.CsvFile; @@ -179,7 +181,9 @@ protected void createEntity(CSVPrinter printer, List csvRecords) thro .withSynonyms(CsvUtil.fieldToStrings(csvRecord.get(4))) .withRelatedTerms(getEntityReferences(printer, csvRecord, 5, GLOSSARY_TERM)) .withReferences(getTermReferences(printer, csvRecord)) - .withTags(getTagLabels(printer, csvRecord, 7)) + .withTags( + getTagLabels( + printer, csvRecord, List.of(Pair.of(7, TagLabel.TagSource.CLASSIFICATION)))) .withReviewers(getEntityReferences(printer, csvRecord, 8, Entity.USER)) .withOwner(getOwner(printer, csvRecord, 9)) .withStatus(getTermStatus(printer, csvRecord)); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java index 3c2a457bb100..db486375f342 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java @@ -16,9 +16,12 @@ import static java.util.stream.Collectors.groupingBy; import static org.openmetadata.common.utils.CommonUtil.listOf; import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; +import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; import static org.openmetadata.csv.CsvUtil.addField; +import static org.openmetadata.csv.CsvUtil.addGlossaryTerms; import static org.openmetadata.csv.CsvUtil.addOwner; import static org.openmetadata.csv.CsvUtil.addTagLabels; +import static org.openmetadata.csv.CsvUtil.addTagTiers; import static org.openmetadata.schema.type.Include.ALL; import static org.openmetadata.schema.type.Include.NON_DELETED; import static 
org.openmetadata.service.Entity.DATABASE_SCHEMA; @@ -35,6 +38,7 @@ import com.google.common.collect.Streams; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; @@ -62,6 +66,7 @@ import org.openmetadata.schema.tests.CustomMetric; import org.openmetadata.schema.tests.TestSuite; import org.openmetadata.schema.type.Column; +import org.openmetadata.schema.type.ColumnDataType; import org.openmetadata.schema.type.ColumnJoin; import org.openmetadata.schema.type.ColumnProfile; import org.openmetadata.schema.type.ColumnProfilerConfig; @@ -774,7 +779,11 @@ public String exportToCsv(String name, String user) throws IOException { public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user) throws IOException { // Validate table - Table table = getByName(null, name, new Fields(allowedFields, "owner,domain,tags,columns")); + Table table = + getByName( + null, + name, + new Fields(allowedFields, "owner,domain,tags,columns,database,service,databaseSchema")); return new TableCsv(table, user).importCsv(csv, dryRun); } @@ -1157,19 +1166,29 @@ public static class TableCsv extends EntityCsv { @Override protected void createEntity(CSVPrinter printer, List csvRecords) throws IOException { CSVRecord csvRecord = getNextRecord(printer, csvRecords); - // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain - // column.fullyQualifiedName, column.displayName, column.description, column.dataTypeDisplay, - // column.tags + // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers retentionPeriod, + // sourceUrl, domain, column.fullyQualifiedName, column.displayName, column.description, + // column.dataTypeDisplay, + // column.tags, column.glossaryTerms if (processRecord) { + // fields tags(4), glossaryTerms(5), tiers(6) + List tagLabels = + getTagLabels( + printer, + csvRecord, + List.of( + Pair.of(4, TagLabel.TagSource.CLASSIFICATION), + Pair.of(5, TagLabel.TagSource.GLOSSARY), + Pair.of(6, TagLabel.TagSource.CLASSIFICATION))); table .withName(csvRecord.get(0)) .withDisplayName(csvRecord.get(1)) .withDescription(csvRecord.get(2)) .withOwner(getOwner(printer, csvRecord, 3)) - .withTags(getTagLabels(printer, csvRecord, 4)) - .withRetentionPeriod(csvRecord.get(5)) - .withSourceUrl(csvRecord.get(6)) - .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN)); + .withTags(tagLabels != null && tagLabels.isEmpty() ? 
null : tagLabels) + .withRetentionPeriod(csvRecord.get(7)) + .withSourceUrl(csvRecord.get(8)) + .withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN)); ImportResult importResult = updateColumn(printer, csvRecord); if (importResult.result().equals(IMPORT_FAILED)) { importFailure(printer, importResult.details(), csvRecord); @@ -1202,16 +1221,71 @@ public ImportResult updateColumn(CSVPrinter printer, CSVRecord csvRecord) throws if (!processRecord) { return new ImportResult(IMPORT_SKIPPED, csvRecord, ""); } - String columnFqn = csvRecord.get(8); + String columnFqn = csvRecord.get(10); Column column = findColumn(table.getColumns(), columnFqn); + boolean columnExists = column != null; if (column == null) { - processRecord = false; - return new ImportResult(IMPORT_FAILED, csvRecord, columnNotFound(8, columnFqn)); + // Create Column, if not found + column = + new Column() + .withName(getLocalColumnName(table.getFullyQualifiedName(), columnFqn)) + .withFullyQualifiedName( + table.getFullyQualifiedName() + Entity.SEPARATOR + columnFqn); } - column.withDisplayName(csvRecord.get(9)); - column.withDescription(csvRecord.get(10)); - column.withDataTypeDisplay(csvRecord.get(11)); - column.withTags(getTagLabels(printer, csvRecord, 12)); + column.withDisplayName(csvRecord.get(11)); + column.withDescription(csvRecord.get(12)); + column.withDataTypeDisplay(csvRecord.get(13)); + column.withDataType( + nullOrEmpty(csvRecord.get(14)) ? null : ColumnDataType.fromValue(csvRecord.get(14))); + column.withArrayDataType( + nullOrEmpty(csvRecord.get(15)) ? null : ColumnDataType.fromValue(csvRecord.get(15))); + column.withDataLength( + nullOrEmpty(csvRecord.get(16)) ? null : Integer.parseInt(csvRecord.get(16))); + List tagLabels = + getTagLabels( + printer, + csvRecord, + List.of( + Pair.of(17, TagLabel.TagSource.CLASSIFICATION), + Pair.of(18, TagLabel.TagSource.GLOSSARY))); + column.withTags(nullOrEmpty(tagLabels) ? null : tagLabels); + column.withOrdinalPosition(nullOrEmpty(table.getColumns()) ? 0 : table.getColumns().size()); + + // If Column Does not Exist add it to the table + if (!columnExists) { + String[] splitColumnName = FullyQualifiedName.split(columnFqn); + // Parent Column + if (splitColumnName.length == 1) { + List tableColumns = + table.getColumns() == null ? new ArrayList<>() : table.getColumns(); + tableColumns.add(column); + table.withColumns(tableColumns); + } else { + String parentColumnFqn = + String.join( + Entity.SEPARATOR, Arrays.copyOf(splitColumnName, splitColumnName.length - 1)); + Column parentColumn = findColumn(table.getColumns(), parentColumnFqn); + if (parentColumn == null) { + return new ImportResult( + IMPORT_FAILED, + csvRecord, + "Parent Column not found. Check the order of the columns in the CSV file."); + } + + // Update Name And Ordinal position in the parent column + column.withName(splitColumnName[splitColumnName.length - 1]); + column.withOrdinalPosition( + nullOrEmpty(parentColumn.getChildren()) ? 0 : parentColumn.getChildren().size()); + // Add this column to children of Parent + List children = + nullOrEmpty(parentColumn.getChildren()) + ? 
new ArrayList<>() + : parentColumn.getChildren(); + children.add(column); + parentColumn.withChildren(children); + } + } + return new ImportResult(IMPORT_SUCCESS, csvRecord, ENTITY_UPDATED); } @@ -1226,6 +1300,8 @@ protected void addRecord(CsvFile csvFile, Table entity) { addField(recordList, entity.getDescription()); addOwner(recordList, entity.getOwner()); addTagLabels(recordList, entity.getTags()); + addGlossaryTerms(recordList, entity.getTags()); + addTagTiers(recordList, entity.getTags()); addField(recordList, entity.getRetentionPeriod()); addField(recordList, entity.getSourceUrl()); String domain = @@ -1243,7 +1319,7 @@ protected void addRecord(CsvFile csvFile, Table entity) { private void addRecord( CsvFile csvFile, List recordList, Column column, boolean emptyTableDetails) { if (emptyTableDetails) { - for (int i = 0; i < 8; i++) { + for (int i = 0; i < 10; i++) { addField(recordList, (String) null); // Add empty fields for table information } } @@ -1253,7 +1329,14 @@ private void addRecord( addField(recordList, column.getDisplayName()); addField(recordList, column.getDescription()); addField(recordList, column.getDataTypeDisplay()); + addField(recordList, column.getDataType() == null ? null : column.getDataType().value()); + addField( + recordList, column.getArrayDataType() == null ? null : column.getArrayDataType().value()); + addField( + recordList, + column.getDataLength() == null ? null : String.valueOf(column.getDataLength())); addTagLabels(recordList, column.getTags()); + addGlossaryTerms(recordList, column.getTags()); addRecord(csvFile, recordList); listOrEmpty(column.getChildren()) .forEach(c -> addRecord(csvFile, new ArrayList<>(), c, true)); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/database/DatabaseServiceResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/database/DatabaseServiceResource.java index a8cb94e9da42..7cb3cbfb9ee6 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/database/DatabaseServiceResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/database/DatabaseServiceResource.java @@ -22,6 +22,7 @@ import io.swagger.v3.oas.annotations.parameters.RequestBody; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.tags.Tag; +import java.io.IOException; import java.util.List; import java.util.UUID; import java.util.stream.Collectors; @@ -55,6 +56,7 @@ import org.openmetadata.schema.type.EntityHistory; import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.MetadataOperation; +import org.openmetadata.schema.type.csv.CsvImportResult; import org.openmetadata.service.Entity; import org.openmetadata.service.jdbi3.DatabaseServiceRepository; import org.openmetadata.service.resources.Collection; @@ -408,6 +410,64 @@ public Response patch( return patchInternal(uriInfo, securityContext, id, patch); } + @GET + @Path("/name/{name}/export") + @Produces(MediaType.TEXT_PLAIN) + @Valid + @Operation( + operationId = "exportDatabaseServices", + summary = "Export database service in CSV format", + responses = { + @ApiResponse( + responseCode = "200", + description = "Exported csv with services from the database services", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = String.class))) + }) + public String exportCsv( + @Context SecurityContext securityContext, + @Parameter(description = "Name of 
the Database Service", schema = @Schema(type = "string")) + @PathParam("name") + String name) + throws IOException { + return exportCsvInternal(securityContext, name); + } + + @PUT + @Path("/name/{name}/import") + @Consumes(MediaType.TEXT_PLAIN) + @Valid + @Operation( + operationId = "importDatabaseService", + summary = "Import service from CSV to update database service (no creation allowed)", + responses = { + @ApiResponse( + responseCode = "200", + description = "Import result", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = CsvImportResult.class))) + }) + public CsvImportResult importCsv( + @Context SecurityContext securityContext, + @Parameter(description = "Name of the Database Service", schema = @Schema(type = "string")) + @PathParam("name") + String name, + @Parameter( + description = + "Dry-run when true is used for validating the CSV without really importing it. (default=true)", + schema = @Schema(type = "boolean")) + @DefaultValue("true") + @QueryParam("dryRun") + boolean dryRun, + String csv) + throws IOException { + return importCsvInternal(securityContext, name, csv, dryRun); + } + @DELETE @Path("/{id}") @Operation( diff --git a/openmetadata-service/src/main/resources/json/data/database/databaseCsvDocumentation.json b/openmetadata-service/src/main/resources/json/data/database/databaseCsvDocumentation.json index 81448abf2515..6357f94217f9 100644 --- a/openmetadata-service/src/main/resources/json/data/database/databaseCsvDocumentation.json +++ b/openmetadata-service/src/main/resources/json/data/database/databaseCsvDocumentation.json @@ -43,6 +43,24 @@ "`PII.Sensitive;PersonalData.Personal`" ] }, + { + "name": "glossaryTerms", + "required": false, + "description": "Fully qualified glossary term names associated with the database schema separated by ';'. Tags derived from the glossary term are automatically applied to the database schema.", + "examples": [ + "`Glossary.GlossaryTerm1`", + "`Glossary.GlossaryTerm1.GlossaryTerm2`" + ] + }, + { + "name": "tiers", + "required": false, + "description": "Fully qualified tier tags names associated with the table separated by ';'.", + "examples": [ + "`Tier.Tier1`", + "`Tier.Tier2`" + ] + }, { "name": "retentionPeriod", "required": false, diff --git a/openmetadata-service/src/main/resources/json/data/databaseSchema/databaseSchemaCsvDocumentation.json b/openmetadata-service/src/main/resources/json/data/databaseSchema/databaseSchemaCsvDocumentation.json index ce64998dc225..55d0fd342eaf 100644 --- a/openmetadata-service/src/main/resources/json/data/databaseSchema/databaseSchemaCsvDocumentation.json +++ b/openmetadata-service/src/main/resources/json/data/databaseSchema/databaseSchemaCsvDocumentation.json @@ -43,6 +43,24 @@ "`PII.Sensitive;PersonalData.Personal`" ] }, + { + "name": "glossaryTerms", + "required": false, + "description": "Fully qualified glossary term names associated with the database schema separated by ';'. 
Tags derived from the glossary term are automatically applied to the database schema.", + "examples": [ + "`Glossary.GlossaryTerm1`", + "`Glossary.GlossaryTerm1.GlossaryTerm2`" + ] + }, + { + "name": "tiers", + "required": false, + "description": "Fully qualified tier tags names associated with the table separated by ';'.", + "examples": [ + "`Tier.Tier1`", + "`Tier.Tier2`" + ] + }, { "name": "retentionPeriod", "required": false, diff --git a/openmetadata-service/src/main/resources/json/data/databaseService/databaseServiceCsvDocumentation.json b/openmetadata-service/src/main/resources/json/data/databaseService/databaseServiceCsvDocumentation.json new file mode 100644 index 000000000000..6c4346b69256 --- /dev/null +++ b/openmetadata-service/src/main/resources/json/data/databaseService/databaseServiceCsvDocumentation.json @@ -0,0 +1,73 @@ +{ + "summary": "Database Service CSV file is used for importing and exporting service metadata from and to an **existing** database service.", + "headers": [ + { + "name": "name", + "required": true, + "description": "The name of the database schema being updated.", + "examples": [ + "`users`, `customers`" + ] + }, + { + "name": "displayName", + "required": false, + "description": "Display name for the table.", + "examples": [ + "`User Schema`, `Customer Schema`" + ] + }, + { + "name": "description", + "required": false, + "description": "Description for the database schema in Markdown format.", + "examples": [ + "`Customer Schema` that contains all the tables related to customer entity." + ] + }, + { + "name": "owner", + "required": false, + "description": "Owner names separated by ';'. For team owner, include prefix team. For user owner, include prefix user.", + "examples": [ + "`team;marketing`", + "`user;john`" + ] + }, + { + "name": "tags", + "required": false, + "description": "Fully qualified classification tag names associated with the database schema separated by ';'.. These tags are automatically applied along with the glossary term, when it is used to label an entity.", + "examples": [ + "`PII.Sensitive`", + "`PII.Sensitive;PersonalData.Personal`" + ] + }, + { + "name": "glossaryTerms", + "required": false, + "description": "Fully qualified glossary term names associated with the database schema separated by ';'. Tags derived from the glossary term are automatically applied to the database schema.", + "examples": [ + "`Glossary.GlossaryTerm1`", + "`Glossary.GlossaryTerm1.GlossaryTerm2`" + ] + }, + { + "name": "tiers", + "required": false, + "description": "Fully qualified tier tags names associated with the table separated by ';'.", + "examples": [ + "`Tier.Tier1`", + "`Tier.Tier2`" + ] + }, + { + "name": "domain", + "required": false, + "description": "Domain to which the database schema belongs to", + "examples": [ + "Marketing", "Sales" + ] + } + ] +} \ No newline at end of file diff --git a/openmetadata-service/src/main/resources/json/data/table/tableCsvDocumentation.json b/openmetadata-service/src/main/resources/json/data/table/tableCsvDocumentation.json index b6a1727e0786..c528965ab0f7 100644 --- a/openmetadata-service/src/main/resources/json/data/table/tableCsvDocumentation.json +++ b/openmetadata-service/src/main/resources/json/data/table/tableCsvDocumentation.json @@ -43,6 +43,24 @@ "`PII.Sensitive;PersonalData.Personal`" ] }, + { + "name": "glossaryTerms", + "required": false, + "description": "Fully qualified glossary term names associated with the table separated by ';'.. 
These tags are automatically applied along with the glossary term, when it is used to label an entity.", + "examples": [ + "`Glossary.GlossaryTerm1`", + "`Glossary.GlossaryTerm1.GlossaryTerm2`" + ] + }, + { + "name": "tiers", + "required": false, + "description": "Fully qualified tier tags names associated with the table separated by ';'.", + "examples": [ + "`Tier.Tier1`", + "`Tier.Tier2`" + ] + }, { "name": "retentionPeriod", "required": false, @@ -99,6 +117,31 @@ "array", "map" ] }, + { + "name": "column.dataType", + "required": false, + "description": "Actual Column data type.", + "examples": [ + "BLOB", "DATE" + ] + }, + { + "name": "column.arrayDataType", + "required": false, + "description": "In case of data Type being Array, type of Array Data.", + "examples": [ + "BLOB", "DATE" + ] + }, + + { + "name": "column.dataLength", + "required": false, + "description": "Data Length of Column in case of CHAR, VARCHAR, BINARY etc.", + "examples": [ + "36" + ] + }, { "name": "column.tags", "required": false, @@ -107,6 +150,15 @@ "`PII.Sensitive`", "`PII.Sensitive;PersonalData.Personal`" ] + }, + { + "name": "column.glossaryTerms", + "required": false, + "description": "Fully qualified glossary term names associated with the column separated by ';'.. Tags automatically derived along with some glossaryTerm will be in `tags`.", + "examples": [ + "`Glossary.GlossaryTerm1`", + "`Glossary.GlossaryTerm1.GlossaryTerm2`" + ] } ] } \ No newline at end of file diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseResourceTest.java index 69a346248fe9..e6118c7a08fc 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseResourceTest.java @@ -25,6 +25,7 @@ import static org.openmetadata.csv.EntityCsvTest.assertSummary; import static org.openmetadata.csv.EntityCsvTest.createCsv; import static org.openmetadata.csv.EntityCsvTest.getFailedRecord; +import static org.openmetadata.csv.EntityCsvTest.getSuccessRecord; import static org.openmetadata.service.util.EntityUtil.getFqn; import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; import static org.openmetadata.service.util.TestUtils.assertListNotEmpty; @@ -44,6 +45,7 @@ import org.openmetadata.schema.api.data.CreateDatabase; import org.openmetadata.schema.api.data.CreateDatabaseSchema; import org.openmetadata.schema.entity.data.Database; +import org.openmetadata.schema.entity.data.DatabaseSchema; import org.openmetadata.schema.type.ApiStatus; import org.openmetadata.schema.type.EntityReference; import org.openmetadata.schema.type.csv.CsvImportResult; @@ -117,7 +119,7 @@ void testImportInvalidCsv() { // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain // Update databaseSchema with invalid tags field String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseCsv.HEADERS)); - String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,"; + String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,"; String csv = createCsv(DatabaseCsv.HEADERS, listOf(record), null); CsvImportResult result = importCsv(databaseName, csv, false); assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); @@ -127,18 +129,27 @@ resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag" }; assertRows(result, 
expectedRows); - // Existing schema can be updated. New schema can't be created. - record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,"; + // invalid tag it will give error. + record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,,"; csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); result = importCsv(databaseName, csv, false); assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); - String schemaFqn = FullyQualifiedName.add(database.getFullyQualifiedName(), "non-existing"); expectedRows = new String[] { - resultsHeader, - getFailedRecord(record, entityNotFound(0, Entity.DATABASE_SCHEMA, schemaFqn)) + resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag")) }; assertRows(result, expectedRows); + + // databaseSchema will be created if it does not exist + String schemaFqn = FullyQualifiedName.add(database.getFullyQualifiedName(), "non-existing"); + record = "non-existing,dsp1,dsc1,,,,,,,"; + csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); + result = importCsv(databaseName, csv, false); + assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0); + expectedRows = new String[] {resultsHeader, getSuccessRecord(record, "Entity created")}; + assertRows(result, expectedRows); + DatabaseSchema createdSchema = schemaTest.getEntityByName(schemaFqn, "id", ADMIN_AUTH_HEADERS); + assertEquals(schemaFqn, createdSchema.getFullyQualifiedName()); } @Test @@ -150,11 +161,12 @@ void testImportExport() throws IOException { schemaTest.createRequest("s1").withDatabase(database.getFullyQualifiedName()); schemaTest.createEntity(createSchema, ADMIN_AUTH_HEADERS); - // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain + // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, retentionPeriod, + // sourceUrl, domain // Update terms with change in description String record = String.format( - "s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s", + "s1,dsp1,new-dsc1,user;%s,,,Tier.Tier1,P23DT23H,http://test.com,%s", user1, escapeCsv(DOMAIN.getFullyQualifiedName())); // Update created entity with changes diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseSchemaResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseSchemaResourceTest.java index 6762afb3b4f2..ad2dd103249b 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseSchemaResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseSchemaResourceTest.java @@ -25,6 +25,7 @@ import static org.openmetadata.csv.EntityCsvTest.assertSummary; import static org.openmetadata.csv.EntityCsvTest.createCsv; import static org.openmetadata.csv.EntityCsvTest.getFailedRecord; +import static org.openmetadata.csv.EntityCsvTest.getSuccessRecord; import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; import static org.openmetadata.service.util.TestUtils.assertListNotNull; import static org.openmetadata.service.util.TestUtils.assertListNull; @@ -118,7 +119,7 @@ void testImportInvalidCsv() { // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain // Create table with invalid tags field String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseSchemaCsv.HEADERS)); - String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,"; + String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,"; String csv = 
createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); CsvImportResult result = importCsv(schemaName, csv, false); assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); @@ -128,17 +129,27 @@ resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag" }; assertRows(result, expectedRows); - // Existing table can be updated. New table can't be created. - record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,"; + // Tag will cause failure + record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,,"; csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); result = importCsv(schemaName, csv, false); assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); - String tableFqn = FullyQualifiedName.add(schema.getFullyQualifiedName(), "non-existing"); expectedRows = new String[] { - resultsHeader, getFailedRecord(record, entityNotFound(0, Entity.TABLE, tableFqn)) + resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag")) }; assertRows(result, expectedRows); + + // non-existing table will cause + record = "non-existing,dsp1,dsc1,,,,,,,"; + String tableFqn = FullyQualifiedName.add(schema.getFullyQualifiedName(), "non-existing"); + csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); + result = importCsv(schemaName, csv, false); + assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0); + expectedRows = new String[] {resultsHeader, getSuccessRecord(record, "Entity created")}; + assertRows(result, expectedRows); + Table table = tableTest.getEntityByName(tableFqn, "id", ADMIN_AUTH_HEADERS); + assertEquals(tableFqn, table.getFullyQualifiedName()); } @Test @@ -155,7 +166,7 @@ void testImportExport() throws IOException { List updateRecords = listOf( String.format( - "s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s", + "s1,dsp1,new-dsc1,user;%s,,,Tier.Tier1,P23DT23H,http://test.com,%s", user1, escapeCsv(DOMAIN.getFullyQualifiedName()))); // Update created entity with changes diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/TableResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/TableResourceTest.java index 6be2ef2905f3..27a1205af67f 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/TableResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/TableResourceTest.java @@ -32,6 +32,7 @@ import static org.openmetadata.csv.EntityCsvTest.assertSummary; import static org.openmetadata.csv.EntityCsvTest.createCsv; import static org.openmetadata.csv.EntityCsvTest.getFailedRecord; +import static org.openmetadata.csv.EntityCsvTest.getSuccessRecord; import static org.openmetadata.schema.type.ColumnDataType.ARRAY; import static org.openmetadata.schema.type.ColumnDataType.BIGINT; import static org.openmetadata.schema.type.ColumnDataType.BINARY; @@ -2294,7 +2295,7 @@ void testImportInvalidCsv() { // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain // Create table with invalid tags field String resultsHeader = recordToString(EntityCsv.getResultHeaders(TableCsv.HEADERS)); - String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,c1,c1,c1,INT,"; + String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,,c1,c1,c1,,INT,,,,"; String csv = createCsv(TableCsv.HEADERS, listOf(record), null); CsvImportResult result = importCsv(tableName, csv, false); assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); @@ -2306,26 +2307,23 @@ void testImportInvalidCsv() { 
assertRows(result, expectedRows); // Add an invalid column tag - record = "s1,dsp1,dsc1,,,,,,c1,,,,Tag.invalidTag"; + record = "s1,dsp1,dsc1,,,,,,,,c1,,,,INT,,,Tag.invalidTag,"; csv = createCsv(TableCsv.HEADERS, listOf(record), null); result = importCsv(tableName, csv, false); assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); expectedRows = new String[] { resultsHeader, - getFailedRecord(record, EntityCsv.entityNotFound(12, "tag", "Tag.invalidTag")) + getFailedRecord(record, EntityCsv.entityNotFound(17, "tag", "Tag.invalidTag")) }; assertRows(result, expectedRows); - // Update a non existing column - record = "s1,dsp1,dsc1,,,,,,nonExistingColumn,,,,"; + // Update a non-existing column, this should create a new column with name "nonExistingColumn" + record = "s1,dsp1,dsc1,,,,,,,,nonExistingColumn,,,,INT,,,,"; csv = createCsv(TableCsv.HEADERS, listOf(record), null); result = importCsv(tableName, csv, false); - assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); - expectedRows = - new String[] { - resultsHeader, getFailedRecord(record, EntityCsv.columnNotFound(8, "nonExistingColumn")) - }; + assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0); + expectedRows = new String[] {resultsHeader, getSuccessRecord(record, "Entity updated")}; assertRows(result, expectedRows); } @@ -2341,17 +2339,18 @@ void testImportExport() throws IOException { createRequest("s1").withColumns(listOf(c1, c2, c3)).withTableConstraints(null); Table table = createEntity(createTable, ADMIN_AUTH_HEADERS); - // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain + // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers retentionPeriod, + // sourceUrl, domain // Update terms with change in description List updateRecords = listOf( String.format( - "s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s,c1," - + "dsp1-new,desc1,type,PII.Sensitive", + "s1,dsp1,new-dsc1,user;%s,,,Tier.Tier1,P23DT23H,http://test.com,%s,c1," + + "dsp1-new,desc1,type,STRUCT,,,PII.Sensitive,", user1, escapeCsv(DOMAIN.getFullyQualifiedName())), - ",,,,,,,,c1.c11,dsp11-new,desc11,type1,PII.Sensitive", - ",,,,,,,,c2,,,,", - ",,,,,,,,c3,,,,"); + ",,,,,,,,,,c1.c11,dsp11-new,desc11,type1,INT,,,PII.Sensitive,", + ",,,,,,,,,,c2,,,type1,INT,,,,", + ",,,,,,,,,,c3,,,type1,INT,,,,"); // Update created entity with changes importCsvAndValidate(table.getFullyQualifiedName(), TableCsv.HEADERS, null, updateRecords); diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java index 1b089953b193..8561e2cdf53c 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java @@ -413,10 +413,10 @@ void testGlossaryImportExport() throws IOException { List createRecords = listOf( String.format( - ",g1,dsp1,\"dsc1,1\",h1;h2;h3,,term1;http://term1,Tier.Tier1,%s;%s,user;%s,%s", + ",g1,dsp1,\"dsc1,1\",h1;h2;h3,,term1;http://term1,PII.None,%s;%s,user;%s,%s", user1, user2, user1, "Approved"), String.format( - ",g2,dsp2,dsc3,h1;h3;h3,,term2;https://term2,Tier.Tier2,%s,user;%s,%s", + ",g2,dsp2,dsc3,h1;h3;h3,,term2;https://term2,PII.NonSensitive,%s,user;%s,%s", user1, user2, "Approved"), String.format( "importExportTest.g1,g11,dsp2,dsc11,h1;h3;h3,,,,%s,team;%s,%s", @@ -426,10 +426,10 @@ void 
testGlossaryImportExport() throws IOException { List updateRecords = listOf( String.format( - ",g1,dsp1,new-dsc1,h1;h2;h3,,term1;http://term1,Tier.Tier1,%s;%s,user;%s,%s", + ",g1,dsp1,new-dsc1,h1;h2;h3,,term1;http://term1,PII.None,%s;%s,user;%s,%s", user1, user2, user1, "Approved"), String.format( - ",g2,dsp2,new-dsc3,h1;h3;h3,,term2;https://term2,Tier.Tier2,%s,user;%s,%s", + ",g2,dsp2,new-dsc3,h1;h3;h3,,term2;https://term2,PII.NonSensitive,%s,user;%s,%s", user1, user2, "Approved"), String.format( "importExportTest.g1,g11,dsp2,new-dsc11,h1;h3;h3,,,,%s,team;%s,%s", @@ -437,7 +437,7 @@ void testGlossaryImportExport() throws IOException { // Add new row to existing rows List newRecords = - listOf(",g3,dsp0,dsc0,h1;h2;h3,,term0;http://term0,Tier.Tier3,,,Approved"); + listOf(",g3,dsp0,dsc0,h1;h2;h3,,term0;http://term0,PII.Sensitive,,,Approved"); testImportExport( glossary.getName(), GlossaryCsv.HEADERS, createRecords, updateRecords, newRecords); }
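
Note for reviewers: below is a minimal, illustrative sketch of the widened table CSV layout this patch introduces. The field order is assumed from the updated tableCsvDocumentation.json and the TableCsv index handling above (tags=4, glossaryTerms=5, tiers=6, retentionPeriod=7, sourceUrl=8, domain=9, column fields from index 10 onward); the table and column names and values are hypothetical examples, not fixtures from this patch.

// Illustrative sketch only; not part of the patch above.
public class TableCsvLayoutExample {
  // Header order assumed from the updated tableCsvDocumentation.json (19 fields, indices 0-18).
  static final String HEADER =
      "name,displayName,description,owner,tags,glossaryTerms,tiers,retentionPeriod,"
          + "sourceUrl,domain,column.fullyQualifiedName,column.displayName,column.description,"
          + "column.dataTypeDisplay,column.dataType,column.arrayDataType,column.dataLength,"
          + "column.tags,column.glossaryTerms";

  // Table-level fields plus the first column on the same row (hypothetical values).
  static final String TABLE_ROW =
      "orders,Orders,Order fact table,user;john,PII.Sensitive,Glossary.GlossaryTerm1,Tier.Tier1,"
          + "P23DT23H,http://test.com,Marketing,"
          + "order_id,Order Id,Primary key,bigint,BIGINT,,,PII.Sensitive,";

  // Additional columns repeat on their own rows with the first 10 table-level fields left empty.
  static final String COLUMN_ONLY_ROW =
      ",,,,,,,,,,customer_id,Customer Id,Foreign key to the customer table,bigint,BIGINT,,,,"
          + "Glossary.GlossaryTerm1";
}

The database service, database, and database schema CSVs follow the same pattern without the column.* fields (and, for the service-level CSV, without retentionPeriod and sourceUrl), and can be exercised through the new /name/{name}/export and /name/{name}/import endpoints added in DatabaseServiceResource.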