From f3035a0640aeeaa0edceae428ce662e619587163 Mon Sep 17 00:00:00 2001 From: shantstepanian <17996546+shantstepanian@users.noreply.github.com> Date: Wed, 4 Dec 2019 17:24:38 -0500 Subject: [PATCH] Preventing concurrent deployments against an environment [#256] - see [#111] also --- CHANGELOG.md | 23 + .../com/gs/obevo/api/platform/AuditLock.java | 36 + .../gs/obevo/api/platform/ChangeAuditDao.java | 3 + .../java/com/gs/obevo/impl/MainDeployer.kt | 405 +++--- .../obevo/impl/changeauditdao/InMemLock.java | 35 + .../IncrementalChangeTypeCommandCalculator.kt | 4 +- .../FixedAbstractYAMLBasedConfiguration.java | 2 +- .../db2/Db2ToInMemorySqlTranslator.java | 162 +-- .../obevo-db-postgresql/dbviewer.sh | 37 + .../obevo-db-postgresql/getIpForDbviewer.sh | 29 + .../platforms/postgresql/PostgreSqlLock.kt | 56 + .../postgresql/PostgreSqlSqlExecutor.java | 99 +- .../postgresql/PostgreSqlDeployerIT.java | 30 +- .../gs/obevo/db/api/platform/SqlExecutor.java | 6 + .../changeauditdao/NoOpChangeAuditDao.java | 121 +- .../SameSchemaChangeAuditDao.java | 6 + .../impl/platforms/AbstractSqlExecutor.java | 218 ++-- .../syntaxparser/UnparseVisitor.java | 1106 ++++++++--------- .../mongodb/impl/MongoDbChangeAuditDao.java | 7 + .../src/site/markdown/app-versioning.md | 162 +++ .../src/site/markdown/dev-setup-sybase-ase.md | 2 +- 21 files changed, 1508 insertions(+), 1041 deletions(-) create mode 100644 obevo-core/src/main/java/com/gs/obevo/api/platform/AuditLock.java create mode 100644 obevo-core/src/main/java/com/gs/obevo/impl/changeauditdao/InMemLock.java create mode 100755 obevo-db-impls/obevo-db-postgresql/dbviewer.sh create mode 100755 obevo-db-impls/obevo-db-postgresql/getIpForDbviewer.sh create mode 100644 obevo-db-impls/obevo-db-postgresql/src/main/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlLock.kt create mode 100644 obevo-site/src/site/markdown/app-versioning.md diff --git a/CHANGELOG.md b/CHANGELOG.md index cd51d767..5afeef00 100644 --- a/CHANGELOG.md +++ 
b/CHANGELOG.md @@ -3,9 +3,12 @@ ## 8.0.0 ### Functionality Improvements +#111: Preventing concurrent deploys against a given schema + ### Technical Improvements Upgrading to JDK 8 bytecode + Upgrading to JGraphT 1.3.0 (first required JDK 8 dependency) ### Bug Fixes @@ -16,14 +19,20 @@ Correcting error messages on graph cycles for complex databases ### Functionality Improvements #239: MongoDB productionization: collection now treated as an incremental change type, reverse-engineering support, clean functionality built + #231 #233: Correct reverse-engineering of table indices to their correct tables + #232 #233: Support reverse-engineering of triggers + #231 #235: Removing redundant unique indexes for Oracle Primary Key reverse-engineering + #236: Support Character Set Encoding for Oracle Reverse Engineering + Allowing valid special characters (e.g. #) to be in object names, as certain RDBMS platforms also allow them ### Technical Improvements DB2 build updated to use the new Docker Hub coordinates from IBM + #252: Optimize Images 610.60kb -> 448.30kb (26.58%) and fix typo ### Bug Fixes @@ -34,7 +43,9 @@ DB2 build updated to use the new Docker Hub coordinates from IBM ### Functionality Improvements #182: Adding Hibernate reverse-engineering API. See [ORM Integration docs](https://goldmansachs.github.io/obevo/orm-integration.html) for more details. + #221 #223 #225: Oracle reverse-engineering improvements - unicode characters, nested tables, types, comments + #228: PostgreSQL improvements for kata - reverse-engineering, in-memory databases ### Technical Improvements @@ -53,6 +64,7 @@ DB2 build updated to use the new Docker Hub coordinates from IBM ### Bug Fixes #212: Fixing inability to handle DB2 reorg exceptions during static data queries. Previously, reorg detection only worked on update statements + #210 #213: Oracle - ignoring DATABASE LINKs during reverse-engineering, instead of erroring out. 
Separate ticket #186 is there for DATABASE LINK and materialized view support @@ -60,13 +72,18 @@ DB2 build updated to use the new Docker Hub coordinates from IBM ### Functionality Improvements #199: Adding support for PostgreSQL roles and extensions in the environment setup step + #202: Add option to export graph representation to a file + #196: Adding UUID support for CSV data loads for PostgreSQL + Initial MySQL support (still in Alpha) ### Technical Improvements Moving more of the code over to Kotlin + #153: Refactor of dependency implementation + #193: Docker onboarding for testing ### Bug Fixes @@ -82,6 +99,7 @@ Moving more of the code over to Kotlin ### Bug Fixes #188: Correcting the metadata retrieval for ASE and PostgreSQL + #184: Documentation cleanups @@ -92,13 +110,18 @@ Moving more of the code over to Kotlin ### Technical Improvements #173: Support YAML/JSON configurations and move away from type safe config towards commons-config v2 + #175: Removing retrolambda, moving back to Java 7, and initial support for Kotlin + #150: Documentation updates ### Bug Fixes #125: Clarify error messages when reading merge config file (missing input dirs, forward-slashes specified) + #165: Supporting Unicode in regular table/db object files and avoiding "TokenMgrError: Lexical error at line ..." issues + #169: Fixing missing quotes in deploy.sh/bat files in obevo-cli; otherwise, spaces in JAVA_HOME or OBEVO_HOME were not supported. + #166: Clearer error message if an invalid platform type is specified in config diff --git a/obevo-core/src/main/java/com/gs/obevo/api/platform/AuditLock.java b/obevo-core/src/main/java/com/gs/obevo/api/platform/AuditLock.java new file mode 100644 index 00000000..93113496 --- /dev/null +++ b/obevo-core/src/main/java/com/gs/obevo/api/platform/AuditLock.java @@ -0,0 +1,36 @@ +/** + * Copyright 2017 Goldman Sachs. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package com.gs.obevo.api.platform; + +import com.gs.obevo.api.appdata.PhysicalSchema; + +/** + * Component to ensure that only a single client can invoke a deploy on an Obevo environment. + * This component is mainly called from MainDeployer. + * As of today, the lock is environment wide (i.e. not per {@link PhysicalSchema}); this may be refactored in the future. + */ +public interface AuditLock { + /** + * Acquire a lock on the environment. + */ + void lock(); + + /** + * Release the lock on the environment. Okay to throw exceptions here, as MainDeployer will handle + * and ignore exceptions when calling this. + */ + void unlock(); +} diff --git a/obevo-core/src/main/java/com/gs/obevo/api/platform/ChangeAuditDao.java b/obevo-core/src/main/java/com/gs/obevo/api/platform/ChangeAuditDao.java index b24d422a..56016578 100644 --- a/obevo-core/src/main/java/com/gs/obevo/api/platform/ChangeAuditDao.java +++ b/obevo-core/src/main/java/com/gs/obevo/api/platform/ChangeAuditDao.java @@ -18,6 +18,7 @@ import com.gs.obevo.api.appdata.Change; import com.gs.obevo.api.appdata.DeployExecution; import org.eclipse.collections.api.list.ImmutableList; +import org.jetbrains.annotations.NotNull; /** * Interface to access the audit table for a given environment. @@ -66,4 +67,6 @@ public interface ChangeAuditDao { * Removes all changes related to the incoming changed object based on the {@link Change#getObjectKey()}. 
*/ void deleteObjectChanges(Change change); + + @NotNull AuditLock acquireLock(); } diff --git a/obevo-core/src/main/java/com/gs/obevo/impl/MainDeployer.kt b/obevo-core/src/main/java/com/gs/obevo/impl/MainDeployer.kt index a047124d..abbfc998 100644 --- a/obevo-core/src/main/java/com/gs/obevo/impl/MainDeployer.kt +++ b/obevo-core/src/main/java/com/gs/obevo/impl/MainDeployer.kt @@ -53,7 +53,17 @@ import org.eclipse.collections.impl.block.factory.Predicates import org.eclipse.collections.impl.block.factory.StringFunctions import org.eclipse.collections.impl.factory.Lists import org.eclipse.collections.impl.factory.Sets +import org.jgrapht.Graph +import org.jgrapht.graph.DefaultEdge +import org.jgrapht.io.ComponentNameProvider +import org.jgrapht.io.DOTExporter +import org.jgrapht.io.GmlExporter +import org.jgrapht.io.GraphMLExporter +import org.jgrapht.io.IntegerComponentNameProvider +import org.jgrapht.io.MatrixExporter import org.slf4j.LoggerFactory +import java.io.FileWriter +import java.io.Writer import java.sql.Timestamp import java.util.Date import java.util.concurrent.TimeUnit @@ -132,239 +142,252 @@ class MainDeployer

>( val deployStrategy = getDeployMode(deployerArgs) - val deployedChanges = readDeployedChanges(deployerArgs) - mainInputReader.logChanges("deployed", deployedChanges) - - // TODO ensure that we've handled the split between static data and others properly - val changeInputSetMap = this.textDependencyExtractor.calculateDependencies( - changeInputs.filter { it.changeKey.changeType.isEnrichableForDependenciesInText } - ) - - val newChangeInputSetMap = mutableMapOf>() - val packageChanges = changeInputs.filter { it.objectKey.changeType.name == ChangeType.PACKAGE_STR || it.objectKey.changeType.name == ChangeType.PACKAGE_BODY } - .map { it.objectKey.objectName }.toSet() - changeInputSetMap.onEach { entry -> - val change = entry.key - val dependencies = entry.value - if (change.objectKey.changeType.name == ChangeType.PACKAGE_BODY) { - newChangeInputSetMap.put(change, dependencies.filterNot { packageChanges.contains(it.target) }.toSet()) - } else { - newChangeInputSetMap.put(change, dependencies) - } - } + val lock = artifactDeployerDao.acquireLock() + try { + LOG.info("Attempting to acquire deploy lock") + lock.lock() + LOG.info("Deploy lock acquired") - var sourceChanges = changeInputs.collect { input -> - val change: Change - if (input.isRerunnable) { - change = ChangeRerunnable(input.changeKey, input.contentHash, input.content) - } else { - change = ChangeIncremental( - input.changeKey, - input.orderWithinObject, - input.contentHash, - input.content, - input.rollbackIfAlreadyDeployedContent, - input.isActive - ) - change.isDrop = input.isDrop - change.baselinedChanges = input.baselinedChanges - change.parallelGroup = input.parallelGroup - change.isKeepIncrementalOrder = input.isKeepIncrementalOrder - } + val deployedChanges = readDeployedChanges(deployerArgs) + mainInputReader.logChanges("deployed", deployedChanges) - change.metadataSection = input.metadataSection + // TODO ensure that we've handled the split between static data and others properly + val 
changeInputSetMap = this.textDependencyExtractor.calculateDependencies( + changeInputs.filter { it.changeKey.changeType.isEnrichableForDependenciesInText } + ) - // TODO should not need separate converted*Content fields in Change. Should only be in ChangeInput - see GITHUB#191 - change.convertedContent = input.convertedContent - change.rollbackContent = input.rollbackContent - change.convertedRollbackContent = input.convertedRollbackContent + val newChangeInputSetMap = mutableMapOf>() + val packageChanges = changeInputs.filter { it.objectKey.changeType.name == ChangeType.PACKAGE_STR || it.objectKey.changeType.name == ChangeType.PACKAGE_BODY } + .map { it.objectKey.objectName }.toSet() + changeInputSetMap.onEach { entry -> + val change = entry.key + val dependencies = entry.value + if (change.objectKey.changeType.name == ChangeType.PACKAGE_BODY) { + newChangeInputSetMap.put(change, dependencies.filterNot { packageChanges.contains(it.target) }.toSet()) + } else { + newChangeInputSetMap.put(change, dependencies) + } + } - change.changeInput = input - change.orderWithinObject = input.orderWithinObject + var sourceChanges = changeInputs.collect { input -> + val change: Change + if (input.isRerunnable) { + change = ChangeRerunnable(input.changeKey, input.contentHash, input.content) + } else { + change = ChangeIncremental( + input.changeKey, + input.orderWithinObject, + input.contentHash, + input.content, + input.rollbackIfAlreadyDeployedContent, + input.isActive + ) + change.isDrop = input.isDrop + change.baselinedChanges = input.baselinedChanges + change.parallelGroup = input.parallelGroup + change.isKeepIncrementalOrder = input.isKeepIncrementalOrder + } - change.order = input.order - change.applyGrants = input.applyGrants - change.changeset = input.changeset + change.metadataSection = input.metadataSection - change.codeDependencies = Sets.immutable.withAll( - newChangeInputSetMap.get(input) // option 1 - use the inputs extracted from the next if possible - ?: 
input.codeDependencies // option 2 - use the pre-populated codeDependencies value - ?: emptySet() // fallback - default to empty set - ) + // TODO should not need separate converted*Content fields in Change. Should only be in ChangeInput - see GITHUB#191 + change.convertedContent = input.convertedContent + change.rollbackContent = input.rollbackContent + change.convertedRollbackContent = input.convertedRollbackContent - change.dropContent = input.dropContent - change.permissionScheme = input.permissionScheme + change.changeInput = input + change.orderWithinObject = input.orderWithinObject - return@collect change - } + change.order = input.order + change.applyGrants = input.applyGrants + change.changeset = input.changeset - // add rollback scripts here + change.codeDependencies = Sets.immutable.withAll( + newChangeInputSetMap.get(input) // option 1 - use the inputs extracted from the next if possible + ?: input.codeDependencies // option 2 - use the pre-populated codeDependencies value + ?: emptySet() // fallback - default to empty set + ) - val changePairs = ChangesetCreator.getChangePairs(deployedChanges, sourceChanges) + change.dropContent = input.dropContent + change.permissionScheme = input.permissionScheme - if (deployerArgs.isRollback) { - // Add back rollback changes to the sourceList so that they can take part in the change calculation - val rollbacksToAddBack = changePairs - .filter { !it.changeKey.changeType.isRerunnable && it.sourceChange == null && it.deployedChange != null } - .map { it.deployedChange as ChangeIncremental } + return@collect change + } - rollbacksToAddBack.forEach { it.isRollbackActivated = true } + // add rollback scripts here - sourceChanges = sourceChanges.newWithAll(rollbacksToAddBack) - } + val changePairs = ChangesetCreator.getChangePairs(deployedChanges, sourceChanges) - // TODO refactor into separate method - if (env.platform.isDropOrderRequired) { - // In this block, we set the "dependentChanges" field on the drop objects to 
ensure they can be sorted for dependencies later on - val dropsToEnrich = changePairs - .filter { it.changeKey.changeType.isRerunnable && it.sourceChange == null && it.deployedChange != null } - .map { it.deployedChange!! } + if (deployerArgs.isRollback) { + // Add back rollback changes to the sourceList so that they can take part in the change calculation + val rollbacksToAddBack = changePairs + .filter { !it.changeKey.changeType.isRerunnable && it.sourceChange == null && it.deployedChange != null } + .map { it.deployedChange as ChangeIncremental } - val dropsByObjectName = dropsToEnrich.associateBy { env.platform.convertDbObjectName().valueOf(it.objectName) } + rollbacksToAddBack.forEach { it.isRollbackActivated = true } - val dropsForTextProcessing = dropsToEnrich.map { drop -> - val sql = changeTypeBehaviorRegistry.getChangeTypeBehavior(drop.changeType).getDefinitionFromEnvironment(drop); - LOG.debug("Found the sql from the DB for dropping: {}", sql) - TextDependencyExtractableImpl(drop.objectName, sql ?: "", drop) + sourceChanges = sourceChanges.newWithAll(rollbacksToAddBack) } - val dropDependencies = this.textDependencyExtractor.calculateDependencies(dropsForTextProcessing) + // TODO refactor into separate method + if (env.platform.isDropOrderRequired) { + // In this block, we set the "dependentChanges" field on the drop objects to ensure they can be sorted for dependencies later on + val dropsToEnrich = changePairs + .filter { it.changeKey.changeType.isRerunnable && it.sourceChange == null && it.deployedChange != null } + .map { it.deployedChange!! 
} - dropsForTextProcessing.forEach { it.codeDependencies = Sets.immutable.ofAll(dropDependencies.get(it)) } + val dropsByObjectName = dropsToEnrich.associateBy { env.platform.convertDbObjectName().valueOf(it.objectName) } - for (drop in dropsForTextProcessing) { - drop.codeDependencies?.let { deps -> - if (deps.notEmpty()) { - drop.payload.dependentChanges = Sets.immutable.ofAll(deps.map { dropsByObjectName[it.target] }) - } + val dropsForTextProcessing = dropsToEnrich.map { drop -> + val sql = changeTypeBehaviorRegistry.getChangeTypeBehavior(drop.changeType).getDefinitionFromEnvironment(drop); + LOG.debug("Found the sql from the DB for dropping: {}", sql) + TextDependencyExtractableImpl(drop.objectName, sql ?: "", drop) } - } - } + val dropDependencies = this.textDependencyExtractor.calculateDependencies(dropsForTextProcessing) - val dependencyGraph = graphEnricher.createDependencyGraph(sourceChanges, deployerArgs.isRollback) + dropsForTextProcessing.forEach { it.codeDependencies = Sets.immutable.ofAll(dropDependencies.get(it)) } - deployerArgs.sourceGraphExportFile?.let { sourceGraphOutputFile -> - val exporterFormat = deployerArgs.sourceGraphExportFormat ?: GraphExportFormat.DOT - // TODO undo this change -// val exporterFunc = getExporterFunc(exporterFormat) -// FileWriter(sourceGraphOutputFile).use { exporterFunc(it, dependencyGraph) } - } - - sourceChanges.each { it.dependentChanges = Sets.immutable.ofAll(GraphUtil.getDependencyNodes(dependencyGraph, it)) } + for (drop in dropsForTextProcessing) { + drop.codeDependencies?.let { deps -> + if (deps.notEmpty()) { + drop.payload.dependentChanges = Sets.immutable.ofAll(deps.map { dropsByObjectName[it.target] }) + } + } + } + } - val artifactsToProcess = changesetCreator.determineChangeset(changePairs, sourceChanges, deployStrategy.isInitAllowedOnHashExceptions) - .applyDeferredPredicate(deployerArgs.changesetPredicate) - validatePriorToDeployment(env, deployStrategy, sourceChanges, deployedChanges, 
artifactsToProcess) - deployerPlugin.validatePriorToDeployment(env, deployStrategy, sourceChanges, deployedChanges, artifactsToProcess) + val dependencyGraph = graphEnricher.createDependencyGraph(sourceChanges, deployerArgs.isRollback) - if (this.shouldProceedWithDbChange(artifactsToProcess, deployerArgs)) { - for (schema in env.physicalSchemas) { - deployerPlugin.initializeSchema(env, schema) + deployerArgs.sourceGraphExportFile?.let { sourceGraphOutputFile -> + val exporterFormat = deployerArgs.sourceGraphExportFormat ?: GraphExportFormat.DOT + // TODO undo this change + val exporterFunc = getExporterFunc(exporterFormat) + FileWriter(sourceGraphOutputFile).use { exporterFunc(it, dependencyGraph) } } - // Note - only init the audit table if we actually proceed w/ a deploy - this.deployExecutionDao.init() - this.artifactDeployerDao.init() - - val executionsBySchema = env.schemas.associateBy({it.name}, { schema -> - val deployExecution = DeployExecutionImpl( - deployerArgs.deployRequesterId, - credential.username, - schema.name, - PlatformConfiguration.getInstance().toolVersion, - Timestamp(Date().time), - deployerArgs.isPerformInitOnly, - deployerArgs.isRollback, - deployerArgs.productVersion, - deployerArgs.reason, - deployerArgs.deployExecutionAttributes - ) - deployExecution.status = DeployExecutionStatus.IN_PROGRESS - deployExecutionDao.persistNew(deployExecution, env.getPhysicalSchema(schema.name)) - deployExecution - }) - - // If there are no deployments required, then just update the artifact tables and return - if (!artifactsToProcess.isDeploymentNeeded) { - LOG.info("No changes detected in the database deployment. 
Updating Deploy Status") - executionsBySchema.values.forEach { deployExecution -> - deployExecution.status = DeployExecutionStatus.SUCCEEDED - this.deployExecutionDao.update(deployExecution) + sourceChanges.each { it.dependentChanges = Sets.immutable.ofAll(GraphUtil.getDependencyNodes(dependencyGraph, it)) } + + val artifactsToProcess = changesetCreator.determineChangeset(changePairs, sourceChanges, deployStrategy.isInitAllowedOnHashExceptions) + .applyDeferredPredicate(deployerArgs.changesetPredicate) + + validatePriorToDeployment(env, deployStrategy, sourceChanges, deployedChanges, artifactsToProcess) + deployerPlugin.validatePriorToDeployment(env, deployStrategy, sourceChanges, deployedChanges, artifactsToProcess) + + if (this.shouldProceedWithDbChange(artifactsToProcess, deployerArgs)) { + env.physicalSchemas.forEach { deployerPlugin.initializeSchema(env, it) } + + // Note - only init the audit table if we actually proceed w/ a deploy + this.deployExecutionDao.init() + this.artifactDeployerDao.init() + + val executionsBySchema = env.schemas.associateBy({ it.name }, { schema -> + val deployExecution = DeployExecutionImpl( + deployerArgs.deployRequesterId, + credential.username, + schema.name, + PlatformConfiguration.getInstance().toolVersion, + Timestamp(Date().time), + deployerArgs.isPerformInitOnly, + deployerArgs.isRollback, + deployerArgs.productVersion, + deployerArgs.reason, + deployerArgs.deployExecutionAttributes + ) + deployExecution.status = DeployExecutionStatus.IN_PROGRESS + deployExecutionDao.persistNew(deployExecution, env.getPhysicalSchema(schema.name)) + deployExecution + }) + + // If there are no deployments required, then just update the artifact tables and return + if (!artifactsToProcess.isDeploymentNeeded) { + LOG.info("No changes detected in the database deployment. 
Updating Deploy Status") + executionsBySchema.values.forEach { deployExecution -> + deployExecution.status = DeployExecutionStatus.SUCCEEDED + this.deployExecutionDao.update(deployExecution) + } + return } - return - } - - val action = if (deployerArgs.isRollback) "Rollback" else "Deployment" - var mainDeploymentSuccess = false - val cec = CommandExecutionContext() - try { - this.doExecute(artifactsToProcess, deployStrategy, onboardingStrategy, executionsBySchema, cec) - LOG.info("$action has Completed Successfully!") - executionsBySchema.values.forEach { deployExecution -> - deployExecution.status = DeployExecutionStatus.SUCCEEDED - this.deployExecutionDao.update(deployExecution) - } + val action = if (deployerArgs.isRollback) "Rollback" else "Deployment" - mainDeploymentSuccess = true - } catch (exc: RuntimeException) { - LOG.info("$action has Failed. We will error out, but first complete the post-deploy step") - executionsBySchema.values.forEach { deployExecution -> - deployExecution.status = DeployExecutionStatus.FAILED - this.deployExecutionDao.update(deployExecution) - } - throw exc - } finally { - LOG.info("Executing the post-deploy step") + var mainDeploymentSuccess = false + val cec = CommandExecutionContext() try { - deployerPlugin.doPostDeployAction(env, sourceChanges) - this.postDeployAction.value(env) + this.doExecute(artifactsToProcess, deployStrategy, onboardingStrategy, executionsBySchema, cec) + LOG.info("$action has Completed Successfully!") + executionsBySchema.values.forEach { deployExecution -> + deployExecution.status = DeployExecutionStatus.SUCCEEDED + this.deployExecutionDao.update(deployExecution) + } + + mainDeploymentSuccess = true } catch (exc: RuntimeException) { - if (mainDeploymentSuccess) { - LOG.info("Exception found in the post-deploy step", exc) - throw exc - } else { - LOG.error("Exception found in the post-deploy step; printing it out here, but there was an exception during the regular deploy as well", exc) + LOG.info("$action 
has Failed. We will error out, but first complete the post-deploy step") + executionsBySchema.values.forEach { deployExecution -> + deployExecution.status = DeployExecutionStatus.FAILED + this.deployExecutionDao.update(deployExecution) + } + throw exc + } finally { + LOG.info("Executing the post-deploy step") + try { + deployerPlugin.doPostDeployAction(env, sourceChanges) + this.postDeployAction.value(env) + } catch (exc: RuntimeException) { + if (mainDeploymentSuccess) { + LOG.info("Exception found in the post-deploy step", exc) + throw exc + } else { + LOG.error("Exception found in the post-deploy step; printing it out here, but there was an exception during the regular deploy as well", exc) + } } - } - LOG.info("Post-deploy step completed") + LOG.info("Post-deploy step completed") - val warnings = cec.warnings - if (warnings.notEmpty()) { - LOG.info("") - LOG.info("Summary of warnings from this deployment; please address:\n{}", warnings.collect(StringFunctions.prepend(" ")).makeString("\n")) - } + val warnings = cec.warnings + if (warnings.notEmpty()) { + LOG.info("") + LOG.info("Summary of warnings from this deployment; please address:\n{}", warnings.collect(StringFunctions.prepend(" ")).makeString("\n")) + } - LOG.info("Deploy complete!") + LOG.info("Deploy complete!") + } + } + } finally { + LOG.info("Attempting to release deploy lock") + try { + lock.unlock() + LOG.info("Deploy lock released") + } catch (_: Exception) { + LOG.info("Deploy lock release failed; ignoring exception") } } } -// private fun getExporterFunc(exporterFormat: Enum): (Writer, Graph) -> Unit { -// val vertexNameProvider : ComponentNameProvider = ComponentNameProvider { -// change : Change -> change.objectName + "." + change.changeName -// } -// -// // TODO Temporary - undo this change! 
-// when (exporterFormat) { -// GraphExportFormat.DOT -> return { writer: Writer, graph: Graph -> -// DOTExporter(IntegerComponentNameProvider(), vertexNameProvider, null).export(writer, graph) -// } -// GraphExportFormat.GML -> return { writer: Writer, graph: Graph -> -// GmlExporter(IntegerComponentNameProvider(), vertexNameProvider, IntegerEdgeNameProvider(), null).export(writer, graph) -// } -// GraphExportFormat.GRAPHML -> return { writer: Writer, graph: Graph -> -// GraphMLExporter(IntegerComponentNameProvider(), vertexNameProvider, IntegerEdgeNameProvider(), null).export(writer, graph) -// } -// GraphExportFormat.MATRIX -> return { writer: Writer, graph: Graph -> -// MatrixExporter().exportAdjacencyMatrix(writer, graph) -// } -// else -> throw IllegalArgumentException("Export Format $exporterFormat is not supported here") -// } -// } + private fun getExporterFunc(exporterFormat: Enum): (Writer, Graph) -> Unit { + val vertexNameProvider : ComponentNameProvider = ComponentNameProvider { + change : Change -> change.objectName + "." + change.changeName + } + + // TODO Temporary - undo this change! 
+ when (exporterFormat) { + GraphExportFormat.DOT -> return { writer: Writer, graph: Graph -> + DOTExporter(IntegerComponentNameProvider(), vertexNameProvider, null).exportGraph(graph, writer) + } + GraphExportFormat.GML -> return { writer: Writer, graph: Graph -> + GmlExporter(IntegerComponentNameProvider(), vertexNameProvider, IntegerComponentNameProvider(), null).exportGraph(graph, writer) + } + GraphExportFormat.GRAPHML -> return { writer: Writer, graph: Graph -> + GraphMLExporter(IntegerComponentNameProvider(), vertexNameProvider, IntegerComponentNameProvider(), null).exportGraph(graph, writer) + } + GraphExportFormat.MATRIX -> return { writer: Writer, graph: Graph -> + MatrixExporter().exportGraph(graph, writer) + } + else -> throw IllegalArgumentException("Export Format $exporterFormat is not supported here") + } + } private fun logArgumentMetrics(deployerArgs: MainDeployerArgs) { deployMetricsCollector.addMetric("args.onboardingMode", deployerArgs.isOnboardingMode) diff --git a/obevo-core/src/main/java/com/gs/obevo/impl/changeauditdao/InMemLock.java b/obevo-core/src/main/java/com/gs/obevo/impl/changeauditdao/InMemLock.java new file mode 100644 index 00000000..061eb660 --- /dev/null +++ b/obevo-core/src/main/java/com/gs/obevo/impl/changeauditdao/InMemLock.java @@ -0,0 +1,35 @@ +/** + * Copyright 2017 Goldman Sachs. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package com.gs.obevo.impl.changeauditdao; + +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import com.gs.obevo.api.platform.AuditLock; + +public class InMemLock implements AuditLock { + private final Lock lock = new ReentrantLock(); + + @Override + public void lock() { + lock.lock(); + } + + @Override + public void unlock() { + lock.unlock(); + } +} diff --git a/obevo-core/src/main/java/com/gs/obevo/impl/changetypes/IncrementalChangeTypeCommandCalculator.kt b/obevo-core/src/main/java/com/gs/obevo/impl/changetypes/IncrementalChangeTypeCommandCalculator.kt index ecc50e92..b16a79bf 100644 --- a/obevo-core/src/main/java/com/gs/obevo/impl/changetypes/IncrementalChangeTypeCommandCalculator.kt +++ b/obevo-core/src/main/java/com/gs/obevo/impl/changetypes/IncrementalChangeTypeCommandCalculator.kt @@ -148,7 +148,7 @@ class IncrementalChangeTypeCommandCalculator internal constructor(private val nu } } else { if (initAllowedOnHashExceptions) { - // SHANT handle init exceptions + // TODO handle init exceptions changeset.add(changeCommandFactory.createUpdateAuditTableOnly(source, "initOnly")) } else { changeset.add(HashMismatchWarning(source, deployed)) @@ -244,7 +244,7 @@ class IncrementalChangeTypeCommandCalculator internal constructor(private val nu for (baseline in baselinedDrops) { if (!successfulBaselinedChanges.contains(baseline)) { - // SHANT do the baseline check here (collect changes that need to be cleared out) + // TODO do the baseline check here (collect changes that need to be cleared out) changeset.add(changeCommandFactory.createImproperlyRemovedWarning(baseline)) } } diff --git a/obevo-core/src/main/java/org/apache/commons/configuration2/FixedAbstractYAMLBasedConfiguration.java b/obevo-core/src/main/java/org/apache/commons/configuration2/FixedAbstractYAMLBasedConfiguration.java index 2bfcbad0..5f4347cd 100644 --- 
a/obevo-core/src/main/java/org/apache/commons/configuration2/FixedAbstractYAMLBasedConfiguration.java +++ b/obevo-core/src/main/java/org/apache/commons/configuration2/FixedAbstractYAMLBasedConfiguration.java @@ -123,7 +123,7 @@ private ImmutableNode constructHierarchy(ImmutableNode.Builder parent, constructHierarchy(subtree, (Map) value); parent.addChild(children); } - // Shant added this fix for the Collection block + // DEVELOPER NOTE - this is the section modified by the Obevo developers to add the Collection block else if (value instanceof Collection) { boolean areAllChildConfigs = true; diff --git a/obevo-db-impls/obevo-db-db2/src/main/java/com/gs/obevo/db/impl/platforms/db2/Db2ToInMemorySqlTranslator.java b/obevo-db-impls/obevo-db-db2/src/main/java/com/gs/obevo/db/impl/platforms/db2/Db2ToInMemorySqlTranslator.java index 31a3af87..c5c7b7ba 100644 --- a/obevo-db-impls/obevo-db-db2/src/main/java/com/gs/obevo/db/impl/platforms/db2/Db2ToInMemorySqlTranslator.java +++ b/obevo-db-impls/obevo-db-db2/src/main/java/com/gs/obevo/db/impl/platforms/db2/Db2ToInMemorySqlTranslator.java @@ -1,81 +1,81 @@ -/** - * Copyright 2017 Goldman Sachs. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package com.gs.obevo.db.impl.platforms.db2; - -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import com.gs.obevo.api.appdata.ChangeInput; -import com.gs.obevo.db.impl.platforms.sqltranslator.PostColumnSqlTranslator; -import com.gs.obevo.db.impl.platforms.sqltranslator.PostParsedSqlTranslator; -import com.gs.obevo.db.impl.platforms.sqltranslator.UnparsedSqlTranslator; -import com.gs.obevo.db.sqlparser.syntaxparser.CreateTable; -import com.gs.obevo.db.sqlparser.syntaxparser.CreateTableColumn; -import org.eclipse.collections.api.list.ImmutableList; -import org.eclipse.collections.impl.factory.Lists; - -public class Db2ToInMemorySqlTranslator implements PostColumnSqlTranslator, PostParsedSqlTranslator, UnparsedSqlTranslator { - private final Pattern defaultPattern = Pattern.compile("(?i)((?:not\\s+)?null)\\s+default\\s+(.*)"); - - // SHANT add test cases for this in the integration test - // these are allowable by db2 (i.e. to use dots instead of colons), but HSQL does not - public static final ImmutableList ACCEPTED_DATE_FORMATS = Lists.immutable.with( - "yyyy-MM-dd-HH.mm.ss.SSS", - "yyyy-MM-dd HH.mm.ss.SSS" - ); - - static final Pattern identityPattern = - Pattern.compile("(?i)\\bgenerated\\s+(.*)as\\s+identity\\s*(\\(.*\\))?"); - - private final Pattern loggedPattern = - Pattern.compile("(?i)(not\\s+)?\\blogged\\b"); - private final Pattern compactPattern = - Pattern.compile("(?i)(not\\s+)?\\bcompact\\b"); - - @Override - public String handlePostColumnText(String postColumnText, CreateTableColumn column, CreateTable table) { - // default clause seems to require a reversal in HSQL - only for DB2? 
- Matcher defaultMatcher = this.defaultPattern.matcher(postColumnText); - while (defaultMatcher.find()) { - String nullClause = defaultMatcher.group(1); - String defaultClause = defaultMatcher.group(2); - postColumnText = defaultMatcher.replaceFirst("DEFAULT " + defaultClause + " " + nullClause); - defaultMatcher = this.defaultPattern.matcher(postColumnText); - } - - Matcher loggedMatcher = this.loggedPattern.matcher(postColumnText); - if (loggedMatcher.find()) { - postColumnText = loggedMatcher.replaceAll(" "); - } - Matcher compactMatcher = this.compactPattern.matcher(postColumnText); - if (compactMatcher.find()) { - postColumnText = compactMatcher.replaceAll(" "); - } - - return postColumnText; - } - - @Override - public String handleAnySqlPostTranslation(String string, ChangeInput change) { - return string.replaceAll("(?i)current\\s+timestamp", "current_timestamp"); - } - - @Override - public String handleRawFullSql(String string, ChangeInput change) { - // filter out specific db2 system calls like reorg - return string.replaceAll("(?i)CALL\\s+SYSPROC.*", ""); - } -} +/** + * Copyright 2017 Goldman Sachs. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package com.gs.obevo.db.impl.platforms.db2; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import com.gs.obevo.api.appdata.ChangeInput; +import com.gs.obevo.db.impl.platforms.sqltranslator.PostColumnSqlTranslator; +import com.gs.obevo.db.impl.platforms.sqltranslator.PostParsedSqlTranslator; +import com.gs.obevo.db.impl.platforms.sqltranslator.UnparsedSqlTranslator; +import com.gs.obevo.db.sqlparser.syntaxparser.CreateTable; +import com.gs.obevo.db.sqlparser.syntaxparser.CreateTableColumn; +import org.eclipse.collections.api.list.ImmutableList; +import org.eclipse.collections.impl.factory.Lists; + +public class Db2ToInMemorySqlTranslator implements PostColumnSqlTranslator, PostParsedSqlTranslator, UnparsedSqlTranslator { + private final Pattern defaultPattern = Pattern.compile("(?i)((?:not\\s+)?null)\\s+default\\s+(.*)"); + + // TODO add test cases for this in the integration test + // these are allowable by db2 (i.e. to use dots instead of colons), but HSQL does not + public static final ImmutableList ACCEPTED_DATE_FORMATS = Lists.immutable.with( + "yyyy-MM-dd-HH.mm.ss.SSS", + "yyyy-MM-dd HH.mm.ss.SSS" + ); + + static final Pattern identityPattern = + Pattern.compile("(?i)\\bgenerated\\s+(.*)as\\s+identity\\s*(\\(.*\\))?"); + + private final Pattern loggedPattern = + Pattern.compile("(?i)(not\\s+)?\\blogged\\b"); + private final Pattern compactPattern = + Pattern.compile("(?i)(not\\s+)?\\bcompact\\b"); + + @Override + public String handlePostColumnText(String postColumnText, CreateTableColumn column, CreateTable table) { + // default clause seems to require a reversal in HSQL - only for DB2? 
+ Matcher defaultMatcher = this.defaultPattern.matcher(postColumnText); + while (defaultMatcher.find()) { + String nullClause = defaultMatcher.group(1); + String defaultClause = defaultMatcher.group(2); + postColumnText = defaultMatcher.replaceFirst("DEFAULT " + defaultClause + " " + nullClause); + defaultMatcher = this.defaultPattern.matcher(postColumnText); + } + + Matcher loggedMatcher = this.loggedPattern.matcher(postColumnText); + if (loggedMatcher.find()) { + postColumnText = loggedMatcher.replaceAll(" "); + } + Matcher compactMatcher = this.compactPattern.matcher(postColumnText); + if (compactMatcher.find()) { + postColumnText = compactMatcher.replaceAll(" "); + } + + return postColumnText; + } + + @Override + public String handleAnySqlPostTranslation(String string, ChangeInput change) { + return string.replaceAll("(?i)current\\s+timestamp", "current_timestamp"); + } + + @Override + public String handleRawFullSql(String string, ChangeInput change) { + // filter out specific db2 system calls like reorg + return string.replaceAll("(?i)CALL\\s+SYSPROC.*", ""); + } +} diff --git a/obevo-db-impls/obevo-db-postgresql/dbviewer.sh b/obevo-db-impls/obevo-db-postgresql/dbviewer.sh new file mode 100755 index 00000000..bd8a46f5 --- /dev/null +++ b/obevo-db-impls/obevo-db-postgresql/dbviewer.sh @@ -0,0 +1,37 @@ +#!/bin/bash +# +# Copyright 2017 Goldman Sachs. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +CONTAINER_NAME=obevo-pgadmin-instance + +RUNNING_CONTAINER_ID=$(docker ps -aqf "name=$CONTAINER_NAME") +if [[ ! -z "$RUNNING_CONTAINER_ID" ]] +then + echo "Shutting down old container" + docker stop $RUNNING_CONTAINER_ID + docker rm $RUNNING_CONTAINER_ID +fi + +PGADMIN_PORT=8080 +PGADMIN_EMAIL="katadeployer@obevo-kata.com" +PGADMIN_PASSWORD="katadeploypass" +docker run --name $CONTAINER_NAME -p $PGADMIN_PORT:80 -e "PGADMIN_DEFAULT_EMAIL=$PGADMIN_EMAIL" -e "PGADMIN_DEFAULT_PASSWORD=$PGADMIN_PASSWORD" -d dpage/pgadmin4 + +echo "" +echo "pgadmin4 setup successful" +echo "" +echo "Please visit http://localhost:$PGADMIN_PORT w/ username = $PGADMIN_EMAIL and password as $PGADMIN_PASSWORD to access the page" + diff --git a/obevo-db-impls/obevo-db-postgresql/getIpForDbviewer.sh b/obevo-db-impls/obevo-db-postgresql/getIpForDbviewer.sh new file mode 100755 index 00000000..d7d3cc98 --- /dev/null +++ b/obevo-db-impls/obevo-db-postgresql/getIpForDbviewer.sh @@ -0,0 +1,29 @@ +#!/bin/bash +# +# Copyright 2017 Goldman Sachs. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +CONTAINER_NAME=obevo-postgresql-instance + +RUNNING_CONTAINER_ID=$(docker ps -aqf "name=$CONTAINER_NAME") + + +if [[ !
-z "$RUNNING_CONTAINER_ID" ]] +then + docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $RUNNING_CONTAINER_ID +else + echo "Container is not running" + exit 1 +fi diff --git a/obevo-db-impls/obevo-db-postgresql/src/main/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlLock.kt b/obevo-db-impls/obevo-db-postgresql/src/main/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlLock.kt new file mode 100644 index 00000000..b8b1a35d --- /dev/null +++ b/obevo-db-impls/obevo-db-postgresql/src/main/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlLock.kt @@ -0,0 +1,56 @@ +/** + * Copyright 2017 Goldman Sachs. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package com.gs.obevo.db.impl.platforms.postgresql + +import com.gs.obevo.api.platform.AuditLock +import com.gs.obevo.db.impl.core.jdbc.JdbcHelper +import org.apache.commons.dbutils.handlers.ScalarHandler +import org.slf4j.LoggerFactory +import java.sql.Connection +import java.time.Duration + +class PostgreSqlLock internal constructor( + private val jdbc: JdbcHelper, + private val conn: Connection +) : AuditLock { + private val defaultRetryDelay = Duration.ofSeconds(5L) + private val lockId = 5749832 // using random integer as the lock ID for the pg advisory lock to prevent collisions with others + + override fun lock() { + var lockAcquired = false + + while (!lockAcquired) { + val sql = "SELECT pg_try_advisory_lock($lockId)" + LOG.info("Attempting to acquire Postgres server lock via {}", sql) + lockAcquired = jdbc.query(conn, sql, ScalarHandler()) + + if (!lockAcquired) { + LOG.info("Lock not yet available; waiting for {} seconds", defaultRetryDelay.seconds) + + Thread.sleep(defaultRetryDelay.toMillis()) + } + } + } + + override fun unlock() { + val lockReleased = jdbc.query(conn, "SELECT pg_advisory_unlock($lockId)", ScalarHandler()) + LOG.info("Postgres lock has been {} released", if (lockReleased) "successfully" else "unsuccessfully") + } + + companion object { + private val LOG = LoggerFactory.getLogger(PostgreSqlLock::class.java) + } +} diff --git a/obevo-db-impls/obevo-db-postgresql/src/main/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlSqlExecutor.java b/obevo-db-impls/obevo-db-postgresql/src/main/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlSqlExecutor.java index ceb15e2f..16c395b2 100644 --- a/obevo-db-impls/obevo-db-postgresql/src/main/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlSqlExecutor.java +++ b/obevo-db-impls/obevo-db-postgresql/src/main/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlSqlExecutor.java @@ -1,44 +1,55 @@ -/** - * Copyright 2017 Goldman Sachs. 
- * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package com.gs.obevo.db.impl.platforms.postgresql; - -import java.sql.Connection; - -import javax.sql.DataSource; - -import com.gs.obevo.api.appdata.PhysicalSchema; -import com.gs.obevo.db.impl.core.jdbc.JdbcHelper; -import com.gs.obevo.db.impl.platforms.AbstractSqlExecutor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class PostgreSqlSqlExecutor extends AbstractSqlExecutor { - private static final Logger LOG = LoggerFactory.getLogger(PostgreSqlSqlExecutor.class); - - public PostgreSqlSqlExecutor(DataSource ds) { - super(ds); - } - - @Override - public void setDataSourceSchema(Connection conn, PhysicalSchema schema) { - // NOTE - SET SCHEMA 'schemaName' (with quotes) is only effective for PostgreSQL versions >= 8.4 - // For 8.3, we must use SET search_path TO schemaName (without quotes) - // This is compatible w/ future versions as well; hence, we keep it - // (unfortunately, can't easily bring up version 8.3 on an app server environment for easy testing) - JdbcHelper jdbc = this.getJdbcTemplate(); - jdbc.update(conn, "SET search_path TO " + schema.getPhysicalName()); - } -} +/** + * Copyright 2017 Goldman Sachs. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package com.gs.obevo.db.impl.platforms.postgresql; + +import java.sql.Connection; +import java.sql.SQLException; + +import javax.sql.DataSource; + +import com.gs.obevo.api.appdata.PhysicalSchema; +import com.gs.obevo.api.platform.AuditLock; +import com.gs.obevo.db.impl.core.jdbc.JdbcHelper; +import com.gs.obevo.db.impl.platforms.AbstractSqlExecutor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class PostgreSqlSqlExecutor extends AbstractSqlExecutor { + private static final Logger LOG = LoggerFactory.getLogger(PostgreSqlSqlExecutor.class); + + public PostgreSqlSqlExecutor(DataSource ds) { + super(ds); + } + + @Override + public void setDataSourceSchema(Connection conn, PhysicalSchema schema) { + // NOTE - SET SCHEMA 'schemaName' (with quotes) is only effective for PostgreSQL versions >= 8.4 + // For 8.3, we must use SET search_path TO schemaName (without quotes) + // This is compatible w/ future versions as well; hence, we keep it + // (unfortunately, can't easily bring up version 8.3 on an app server environment for easy testing) + JdbcHelper jdbc = this.getJdbcTemplate(); + jdbc.update(conn, "SET search_path TO " + schema.getPhysicalName()); + } + + @Override + public AuditLock lock(Connection conn) { + try { + return new PostgreSqlLock(this.getJdbcTemplate(), getDs().getConnection()); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } +} diff --git a/obevo-db-impls/obevo-db-postgresql/src/test/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlDeployerIT.java 
b/obevo-db-impls/obevo-db-postgresql/src/test/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlDeployerIT.java index f9de46fd..94b2e82a 100644 --- a/obevo-db-impls/obevo-db-postgresql/src/test/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlDeployerIT.java +++ b/obevo-db-impls/obevo-db-postgresql/src/test/java/com/gs/obevo/db/impl/platforms/postgresql/PostgreSqlDeployerIT.java @@ -19,6 +19,10 @@ import java.sql.Timestamp; import java.util.List; import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.function.Function; import javax.sql.DataSource; @@ -26,6 +30,7 @@ import com.gs.obevo.db.impl.core.jdbc.JdbcHelper; import org.apache.commons.dbutils.DbUtils; import org.eclipse.collections.api.block.function.primitive.IntToObjectFunction; +import org.eclipse.collections.impl.factory.Lists; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -49,14 +54,31 @@ public PostgreSqlDeployerIT(IntToObjectFunction getAppCont @Test public void testDeploy() throws Exception { - getAppContext.valueOf(1) + DbDeployerAppContext dbDeployerAppContext = getAppContext.valueOf(1) .setupEnvInfra() .setupEnvInfra() - .cleanEnvironment() - .deploy(); + .cleanEnvironment(); + + Function threadInvoker = threadNumber -> { + System.out.println("DEPLOY THREAD " + threadNumber); + getAppContext.valueOf(1).deploy(); + return null; + }; + + // Invoke the jobs in parallel to ensure that the postgresql locking works; only one deploy should go through, + // whereas the others will become no-ops + ExecutorService executorService = Executors.newFixedThreadPool(3); + List> futures = executorService.invokeAll(Lists.mutable.of( + () -> threadInvoker.apply(1), + () -> threadInvoker.apply(2), + () -> threadInvoker.apply(3) + )); + for (Future future : futures) { + future.get(); + } // ensuring that we can modify - DbDeployerAppContext 
dbDeployerAppContext = getAppContext.valueOf(2); + dbDeployerAppContext = getAppContext.valueOf(2); dbDeployerAppContext .setupEnvInfra() .deploy(); diff --git a/obevo-db/src/main/java/com/gs/obevo/db/api/platform/SqlExecutor.java b/obevo-db/src/main/java/com/gs/obevo/db/api/platform/SqlExecutor.java index f0697cdb..84462e1f 100644 --- a/obevo-db/src/main/java/com/gs/obevo/db/api/platform/SqlExecutor.java +++ b/obevo-db/src/main/java/com/gs/obevo/db/api/platform/SqlExecutor.java @@ -18,9 +18,11 @@ import java.sql.Connection; import com.gs.obevo.api.appdata.PhysicalSchema; +import com.gs.obevo.api.platform.AuditLock; import com.gs.obevo.db.impl.core.jdbc.JdbcHelper; import com.gs.obevo.dbmetadata.api.DbMetadataManager; import com.gs.obevo.impl.ExecuteChangeCommand; +import com.gs.obevo.impl.changeauditdao.InMemLock; import org.eclipse.collections.api.block.procedure.Procedure; import org.eclipse.collections.impl.block.function.checked.ThrowingFunction; @@ -64,4 +66,8 @@ public interface SqlExecutor { */ @Deprecated void performExtraCleanOperation(ExecuteChangeCommand command, DbMetadataManager metaDataMgr); + + default AuditLock lock(Connection conn) { + return new InMemLock(); + } } diff --git a/obevo-db/src/main/java/com/gs/obevo/db/impl/core/changeauditdao/NoOpChangeAuditDao.java b/obevo-db/src/main/java/com/gs/obevo/db/impl/core/changeauditdao/NoOpChangeAuditDao.java index 3fd42516..f7c2b488 100644 --- a/obevo-db/src/main/java/com/gs/obevo/db/impl/core/changeauditdao/NoOpChangeAuditDao.java +++ b/obevo-db/src/main/java/com/gs/obevo/db/impl/core/changeauditdao/NoOpChangeAuditDao.java @@ -1,57 +1,64 @@ -/** - * Copyright 2017 Goldman Sachs. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package com.gs.obevo.db.impl.core.changeauditdao; - -import com.gs.obevo.api.appdata.Change; -import com.gs.obevo.api.appdata.DeployExecution; -import com.gs.obevo.api.platform.ChangeAuditDao; -import org.eclipse.collections.api.list.ImmutableList; -import org.eclipse.collections.impl.factory.Lists; - -/** - * No-op used for in-memory db implementations where we don't need to keep audit. - */ -public class NoOpChangeAuditDao implements ChangeAuditDao { - @Override - public String getAuditContainerName() { - return "no-op"; - } - - @Override - public void init() { - } - - @Override - public void insertNewChange(Change change, DeployExecution deployExecution) { - } - - @Override - public void updateOrInsertChange(Change change, DeployExecution deployExecution) { - } - - @Override - public ImmutableList getDeployedChanges() { - return Lists.immutable.with(); - } - - @Override - public void deleteChange(Change change) { - } - - @Override - public void deleteObjectChanges(Change change) { - } -} +/** + * Copyright 2017 Goldman Sachs. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package com.gs.obevo.db.impl.core.changeauditdao; + +import com.gs.obevo.api.appdata.Change; +import com.gs.obevo.api.appdata.DeployExecution; +import com.gs.obevo.api.platform.AuditLock; +import com.gs.obevo.api.platform.ChangeAuditDao; +import com.gs.obevo.impl.changeauditdao.InMemLock; +import org.eclipse.collections.api.list.ImmutableList; +import org.eclipse.collections.impl.factory.Lists; + +/** + * No-op used for in-memory db implementations where we don't need to keep audit. + */ +public class NoOpChangeAuditDao implements ChangeAuditDao { + @Override + public String getAuditContainerName() { + return "no-op"; + } + + @Override + public void init() { + } + + @Override + public void insertNewChange(Change change, DeployExecution deployExecution) { + } + + @Override + public void updateOrInsertChange(Change change, DeployExecution deployExecution) { + } + + @Override + public ImmutableList getDeployedChanges() { + return Lists.immutable.with(); + } + + @Override + public void deleteChange(Change change) { + } + + @Override + public void deleteObjectChanges(Change change) { + } + + @Override + public AuditLock acquireLock() { + return new InMemLock(); + } +} diff --git a/obevo-db/src/main/java/com/gs/obevo/db/impl/core/changeauditdao/SameSchemaChangeAuditDao.java b/obevo-db/src/main/java/com/gs/obevo/db/impl/core/changeauditdao/SameSchemaChangeAuditDao.java index 86b77940..e68fc7a2 100644 --- a/obevo-db/src/main/java/com/gs/obevo/db/impl/core/changeauditdao/SameSchemaChangeAuditDao.java +++ b/obevo-db/src/main/java/com/gs/obevo/db/impl/core/changeauditdao/SameSchemaChangeAuditDao.java @@ -29,6 +29,7 @@ import com.gs.obevo.api.appdata.DeployExecutionStatus; import com.gs.obevo.api.appdata.ObjectKey; import com.gs.obevo.api.appdata.PhysicalSchema; +import com.gs.obevo.api.platform.AuditLock; import com.gs.obevo.api.platform.ChangeAuditDao; import 
com.gs.obevo.api.platform.ChangeType; import com.gs.obevo.api.platform.DeployExecutionDao; @@ -476,4 +477,9 @@ private Timestamp getCurrentTimestamp() { private String resolveColumnName(String colName) { return colName; } + + @Override + public AuditLock acquireLock() { + return sqlExecutor.executeWithinContext(env.getPhysicalSchemas().getFirst(), sqlExecutor::lock); + } } diff --git a/obevo-db/src/main/java/com/gs/obevo/db/impl/platforms/AbstractSqlExecutor.java b/obevo-db/src/main/java/com/gs/obevo/db/impl/platforms/AbstractSqlExecutor.java index 082b5190..dffec85a 100644 --- a/obevo-db/src/main/java/com/gs/obevo/db/impl/platforms/AbstractSqlExecutor.java +++ b/obevo-db/src/main/java/com/gs/obevo/db/impl/platforms/AbstractSqlExecutor.java @@ -1,107 +1,111 @@ -/** - * Copyright 2017 Goldman Sachs. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package com.gs.obevo.db.impl.platforms; - -import java.sql.Connection; - -import javax.sql.DataSource; - -import com.gs.obevo.api.appdata.PhysicalSchema; -import com.gs.obevo.api.platform.DeployerRuntimeException; -import com.gs.obevo.db.api.platform.SqlExecutor; -import com.gs.obevo.db.impl.core.jdbc.DefaultJdbcHandler; -import com.gs.obevo.db.impl.core.jdbc.JdbcHandler; -import com.gs.obevo.db.impl.core.jdbc.JdbcHelper; -import com.gs.obevo.dbmetadata.api.DbMetadataManager; -import com.gs.obevo.impl.ExecuteChangeCommand; -import org.apache.commons.dbutils.DbUtils; -import org.eclipse.collections.api.block.procedure.Procedure; -import org.eclipse.collections.impl.block.function.checked.ThrowingFunction; - -public abstract class AbstractSqlExecutor implements SqlExecutor { - private final DataSource ds; - - protected AbstractSqlExecutor(DataSource ds) { - this.ds = ds; - } - - @Override - public final JdbcHelper getJdbcTemplate() { - // Note - pmdBroken value should be false, as otherwise the CSV inserts w/ prepared statements may not work. - // If any sqls fail, then it may not be used correctly. This proved to be an annoyance w/ Sybase ASE, but - // eventually we fixed it and can now assume this is false - return createJdbcHelper(ds); - } - - /** - * Overload to facilitate creating the JdbcHelper given any datasource. 
- */ - public JdbcHelper createJdbcHelper(DataSource ds) { - return new JdbcHelper(this.getJdbcHandler(), this.isParameterTypeEnabled()); - } - - @Override - public void executeWithinContext(PhysicalSchema schema, Procedure runnable) { - Connection conn = null; - try { - conn = ds.getConnection(); - setDataSourceSchema(conn, schema); - runnable.value(conn); - } catch (RuntimeException e) { - throw e; // rethrowing DeployerRuntimeException to avoid excessive stack trace outputs - } catch (Exception e) { - throw new DeployerRuntimeException(e); - } finally { - DbUtils.closeQuietly(conn); - } - } - - @Override - public T executeWithinContext(PhysicalSchema schema, ThrowingFunction callable) { - Connection conn = null; - try { - conn = ds.getConnection(); - setDataSourceSchema(conn, schema); - return callable.safeValueOf(conn); - } catch (DeployerRuntimeException e) { - throw e; // rethrowing DeployerRuntimeException to avoid excessive stack trace outputs - } catch (Exception e) { - throw new DeployerRuntimeException(e); - } finally { - DbUtils.closeQuietly(conn); - } - } - - @Override - public void performExtraCleanOperation(final ExecuteChangeCommand command, final DbMetadataManager metaDataMgr) { - // as a default no special extra steps - } - - /** - * Define a call for setting the schema on the datasource. - * While most implementations can set the schema via simple SQL, Sybase IQ is an exceptional case that requires - * an actual change in the DataSource; hence, we put this behind this interface - */ - protected abstract void setDataSourceSchema(Connection conn, PhysicalSchema schema); - - protected JdbcHandler getJdbcHandler() { - return new DefaultJdbcHandler(); - } - - protected boolean isParameterTypeEnabled() { - return true; - } -} +/** + * Copyright 2017 Goldman Sachs. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package com.gs.obevo.db.impl.platforms; + +import java.sql.Connection; + +import javax.sql.DataSource; + +import com.gs.obevo.api.appdata.PhysicalSchema; +import com.gs.obevo.api.platform.DeployerRuntimeException; +import com.gs.obevo.db.api.platform.SqlExecutor; +import com.gs.obevo.db.impl.core.jdbc.DefaultJdbcHandler; +import com.gs.obevo.db.impl.core.jdbc.JdbcHandler; +import com.gs.obevo.db.impl.core.jdbc.JdbcHelper; +import com.gs.obevo.dbmetadata.api.DbMetadataManager; +import com.gs.obevo.impl.ExecuteChangeCommand; +import org.apache.commons.dbutils.DbUtils; +import org.eclipse.collections.api.block.procedure.Procedure; +import org.eclipse.collections.impl.block.function.checked.ThrowingFunction; + +public abstract class AbstractSqlExecutor implements SqlExecutor { + private final DataSource ds; + + protected AbstractSqlExecutor(DataSource ds) { + this.ds = ds; + } + + protected DataSource getDs() { + return ds; + } + + @Override + public final JdbcHelper getJdbcTemplate() { + // Note - pmdBroken value should be false, as otherwise the CSV inserts w/ prepared statements may not work. + // If any sqls fail, then it may not be used correctly. This proved to be an annoyance w/ Sybase ASE, but + // eventually we fixed it and can now assume this is false + return createJdbcHelper(ds); + } + + /** + * Overload to facilitate creating the JdbcHelper given any datasource. 
+ */ + public JdbcHelper createJdbcHelper(DataSource ds) { + return new JdbcHelper(this.getJdbcHandler(), this.isParameterTypeEnabled()); + } + + @Override + public void executeWithinContext(PhysicalSchema schema, Procedure runnable) { + Connection conn = null; + try { + conn = ds.getConnection(); + setDataSourceSchema(conn, schema); + runnable.value(conn); + } catch (RuntimeException e) { + throw e; // rethrowing DeployerRuntimeException to avoid excessive stack trace outputs + } catch (Exception e) { + throw new DeployerRuntimeException(e); + } finally { + DbUtils.closeQuietly(conn); + } + } + + @Override + public T executeWithinContext(PhysicalSchema schema, ThrowingFunction callable) { + Connection conn = null; + try { + conn = ds.getConnection(); + setDataSourceSchema(conn, schema); + return callable.safeValueOf(conn); + } catch (DeployerRuntimeException e) { + throw e; // rethrowing DeployerRuntimeException to avoid excessive stack trace outputs + } catch (Exception e) { + throw new DeployerRuntimeException(e); + } finally { + DbUtils.closeQuietly(conn); + } + } + + @Override + public void performExtraCleanOperation(final ExecuteChangeCommand command, final DbMetadataManager metaDataMgr) { + // as a default no special extra steps + } + + /** + * Define a call for setting the schema on the datasource. 
+ * While most implementations can set the schema via simple SQL, Sybase IQ is an exceptional case that requires + * an actual change in the DataSource; hence, we put this behind this interface + */ + protected abstract void setDataSourceSchema(Connection conn, PhysicalSchema schema); + + protected JdbcHandler getJdbcHandler() { + return new DefaultJdbcHandler(); + } + + protected boolean isParameterTypeEnabled() { + return true; + } +} diff --git a/obevo-db/src/main/java/com/gs/obevo/db/sqlparser/syntaxparser/UnparseVisitor.java b/obevo-db/src/main/java/com/gs/obevo/db/sqlparser/syntaxparser/UnparseVisitor.java index eea7200b..1cdd0711 100644 --- a/obevo-db/src/main/java/com/gs/obevo/db/sqlparser/syntaxparser/UnparseVisitor.java +++ b/obevo-db/src/main/java/com/gs/obevo/db/sqlparser/syntaxparser/UnparseVisitor.java @@ -1,553 +1,553 @@ -/** - * Copyright 2017 Goldman Sachs. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package com.gs.obevo.db.sqlparser.syntaxparser; - -import java.io.PrintStream; -import java.util.ArrayList; -import java.util.List; -import java.util.Stack; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class UnparseVisitor implements SqlParserVisitor { - private static final Logger LOG = LoggerFactory.getLogger(UnparseVisitor.class); - - private final PrintStream out; - private CreateIndex createIndex; - private AlterTableDrop alterTableDrop; - - public UnparseVisitor(PrintStream o) { - this.out = o; - } - - private static final boolean debug = false; - - private Object print(SimpleNode node, Object data) { - if (debug) { - this.out.println(">>>> Calling print from " + node.getClass() + " ***"); - } - Token t1 = node.getFirstToken(); - Token t = new Token(); - t.next = t1; - - SimpleNode n; - for (int ord = 0; ord < node.jjtGetNumChildren(); ord++) { - n = (SimpleNode) node.jjtGetChild(ord); - while (true) { - t = t.next; - if (t == n.getFirstToken()) { - break; - } - this.print(t); - } - n.jjtAccept(this, data); - t = n.getLastToken(); - } - - while (t != node.getLastToken()) { - t = t.next; - this.print(t); - } - if (debug) { - this.out.println("<< Exiting print for " + node.getClass() + " ***"); - } - return data; - } - - private void print(Token t) { - Token tt = t.specialToken; - if (tt != null) { - while (tt.specialToken != null) { - tt = tt.specialToken; - } - while (tt != null) { - // To preserve the whitespace - switch (tt.kind) { - case SqlParserConstants.FORMAL_COMMENT: - case SqlParserConstants.MULTI_LINE_COMMENT: - case SqlParserConstants.SINGLE_LINE_COMMENT: - break; - default: - this.currentSb().append(this.addUnicodeEscapes(tt.image)); - } - tt = tt.next; - } - } - - // To remove comments from the output - switch (t.kind) { - case SqlParserConstants.FORMAL_COMMENT: - case SqlParserConstants.MULTI_LINE_COMMENT: - case SqlParserConstants.SINGLE_LINE_COMMENT: - case SqlParserConstants.IN_FORMAL_COMMENT: - case 
SqlParserConstants.IN_MULTI_LINE_COMMENT: - case SqlParserConstants.IN_SINGLE_LINE_COMMENT: - break; - default: - this.currentSb().append(this.addUnicodeEscapes(t.image)); - } - - if (debug) { - this.out.print("\n"); // shant added - } - } - - private StringBuilder currentSb() { - if (this.sbs.isEmpty()) { - return new StringBuilder(); // essentially writing to null - } else { - return this.sbs.peek(); - } - } - - private String addUnicodeEscapes(String str) { - String retval = ""; - char ch; - for (int i = 0; i < str.length(); i++) { - ch = str.charAt(i); - if ((ch < 0x20 || ch > 0x7e) && - ch != '\t' && ch != '\n' && ch != '\r' && ch != '\f') { - String s = "0000" + Integer.toString(ch, 16); - retval += "\\u" + s.substring(s.length() - 4, s.length()); - } else { - retval += ch; - } - } - return retval; - } - - @Override - public Object visit(SimpleNode node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTCompilationUnit node, Object data) { - return this.print(node, data); - } - - private CreateTable createTable; - - public CreateTable getCreateTable() { - return this.createTable; - } - - private CreateTableColumn createTableColumn; - - @Override - public Object visit(ASTCreateTable node, Object data) { - this.createTable = new CreateTable(); - Object obj = this.print(node, data); - this.createTable.setName(this.tableName); - this.createTable.setPostTableCreateText(this.postCreateObjectClauses); - this.createTable.getConstraints().addAll(this.constraints); - return obj; - } - - private String tableName = null; - - @Override - public Object visit(ASTTableName node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.tableName = sb.toString().trim(); - return obj; - } - - private String dropObjectName = null; - - @Override - public Object visit(ASTDropObjectName node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = 
this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.dropObjectName = sb.toString().trim(); - return obj; - } - - @Override - public Object visit(ASTCreateTableColumnList node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTCreateTableColumn node, Object data) { - this.createTableColumn = new CreateTableColumn(); - Object obj = this.print(node, data); - this.createTableColumn.setName(this.columnName); - this.createTableColumn.setPostColumnText(this.postColumnClauses); - - this.createTable.getColumns().add(this.createTableColumn); - this.createTableColumn = null; - this.postColumnClauses = null; - this.columnName = null; - return obj; - } - - @Override - public Object visit(ASTDataType node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - LOG.trace("We only pop the string off the stack; will not use the ASTDataType string: {}", sb); - this.createTableColumn.getType().setTypeName(this.dataTypeName); - return obj; - } - - @Override - public Object visit(ASTDataTypeLenList node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.createTableColumn.getType().setTypeParams(sb.toString().trim()); - return obj; - } - - private final Stack sbs = new Stack(); - - private String columnName; - - @Override - public Object visit(ASTColumnName node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.columnName = sb.toString().trim(); - return obj; - } - - private String dataTypeName; - - @Override - public Object visit(ASTDataTypeName node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.dataTypeName = sb.toString().trim(); - return obj; - } - - @Override - public Object 
visit(ASTCreateStatement node, Object data) { - return this.print(node, data); - } - - public CreateIndex getCreateIndex() { - return this.createIndex; - } - - @Override - public Object visit(ASTCreateIndex node, Object data) { - this.createIndex = new CreateIndex(); - Object obj = this.print(node, data); - this.createIndex.setName(this.indexName); - this.createIndex.setUnique(this.uniqueIndex); - this.createIndex.setClusterClause(this.clusterClause); - this.createIndex.setIndexQualifier(this.indexQualifier); - this.createIndex.setTableName(this.tableName); - this.createIndex.setColumns(this.indexColumnList); - this.createIndex.setPostCreateObjectClauses(this.postCreateObjectClauses); - return obj; - } - - private boolean uniqueIndex; - - @Override - public Object visit(ASTUnique node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - LOG.trace("We only pop the string off the stack; will not use the ASTUnique string: {}", sb); - this.uniqueIndex = true; - return obj; - } - - private String indexName; - - @Override - public Object visit(ASTIndexName node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.indexName = sb.toString().trim(); - return obj; - } - - private String indexQualifier; - - @Override - public Object visit(ASTIndexQualifier node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.indexQualifier = sb.toString().trim(); - return obj; - } - - private String clusterClause; - - @Override - public Object visit(ASTClusterClause node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.clusterClause = sb.toString().trim(); - return obj; - } - - @Override - public Object visit(ASTCreateTableEnd node, Object data) { - return 
this.print(node, data); - } - - private String postCreateObjectClauses; - - @Override - public Object visit(ASTPostObjectTableClauses node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.postCreateObjectClauses = sb.toString().trim(); - return obj; - } - - private String indexColumnList; - - @Override - public Object visit(ASTIndexColumnList node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.indexColumnList = sb.toString().trim(); - return obj; - } - - private String postConstraintClauses; - - @Override - public Object visit(ASTPostConstraintClauses node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.postConstraintClauses = sb.toString().trim(); - return obj; - } - - private String postColumnClauses; - - @Override - public Object visit(ASTPostColumnClauses node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.postColumnClauses = sb.toString().trim(); - return obj; - } - - @Override - public Object visit(ASTIdentifierName node, Object data) { - return this.print(node, data); - } - - private String constraintName; - - @Override - public Object visit(ASTConstraintName node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.constraintName = sb.toString().trim(); - return obj; - } - - private String objectType; - - @Override - public Object visit(ASTDropObjectType node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - this.objectType = sb.toString().trim(); - return obj; - } - - private DropStatement dropStatement; - - @Override - public Object 
visit(ASTDropStatement node, Object data) { - Object obj = this.print(node, data); - this.dropStatement = new DropStatement(); - this.dropStatement.setObjectType(this.objectType); - this.dropStatement.setObjectName(this.dropObjectName); - this.dropStatement.setPostDropClauses(this.postCreateObjectClauses); - return obj; - } - - public DropStatement getDropStatement() { - return this.dropStatement; - } - - private final List constraints = new ArrayList(); - - @Override - public Object visit(ASTConstraintClause node, Object data) { - Object obj = this.print(node, data); - NamedConstraint constraint = new NamedConstraint(); - constraint.setName(this.constraintName); - - Constraint currentConstraint = this.constraints.get(this.constraints.size() - 1); - if (currentConstraint.getRawText() == null) { - constraint.setPostObjectClauses(currentConstraint.getPostObjectClauses()); - constraint.setColumns(currentConstraint.getColumns()); - constraint.setClusteredClause(currentConstraint.getClusteredClause()); - constraint.setType(currentConstraint.getType()); - } else { - constraint.setRawText(currentConstraint.getRawText()); - } - - this.constraints.set(this.constraints.size() - 1, constraint); - return obj; - } - - @Override - public Object visit(ASTPrimaryKeyClause node, Object data) { - this.clusterClause = null; - this.indexColumnList = null; - this.postCreateObjectClauses = null; - Object obj = this.print(node, data); - Constraint currentConstraint = new Constraint(); - currentConstraint.setType("PRIMARY KEY"); - currentConstraint.setClusteredClause(this.clusterClause); - currentConstraint.setColumns(this.indexColumnList); - currentConstraint.setPostObjectClauses(this.postConstraintClauses); - - this.constraints.add(currentConstraint); - return obj; - } - - @Override - public Object visit(ASTUniqueClause node, Object data) { - this.clusterClause = null; - this.indexColumnList = null; - this.postCreateObjectClauses = null; - Object obj = this.print(node, data); - 
Constraint currentConstraint = new Constraint(); - currentConstraint.setType("UNIQUE"); - currentConstraint.setClusteredClause(this.clusterClause); - currentConstraint.setColumns(this.indexColumnList); - currentConstraint.setPostObjectClauses(this.postConstraintClauses); - - this.constraints.add(currentConstraint); - return obj; - } - - @Override - public Object visit(ASTOtherConstraintClause node, Object data) { - this.sbs.push(new StringBuilder()); - Object obj = this.print(node, data); - StringBuilder sb = this.sbs.pop(); - - Constraint currentConstraint = new Constraint(); - currentConstraint.setRawText(sb.toString().trim()); - - this.constraints.add(currentConstraint); - return obj; - } - - @Override - public Object visit(ASTExpression node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTSimpleExpression node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTExpressionList node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTNestedExpression node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTNestedExpressionList node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTNoCommaSimpleExpression node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTNoCommaExpressionList node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTNoCommaExpression node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTNoCommaNestedExpression node, Object data) { - return this.print(node, data); - } - - @Override - public Object visit(ASTNoCommaNestedExpressionList node, Object data) { - return this.print(node, data); - } - - private AlterTableAdd alterTableAdd; - - public AlterTableAdd getAlterTableAdd() { - return this.alterTableAdd; - } - - 
@Override - public Object visit(ASTAlterStatement node, Object data) { - Object obj = this.print(node, data); - if (this.alterTableAdd == null) { - this.alterTableDrop = new AlterTableDrop(); - this.alterTableDrop.setTableName(this.tableName); - this.alterTableDrop.setDropStatement(this.dropStatement); - } - // the rest is processed in ASTAlterTableAdd - may refactor at some point - return obj; - } - - public AlterTableDrop getAlterTableDrop() { - return this.alterTableDrop; - } - - @Override - public Object visit(ASTAlterTableAdd node, Object data) { - this.alterTableAdd = new AlterTableAdd(); - Object obj = this.print(node, data); - this.alterTableAdd.setTableName(this.tableName); - this.alterTableAdd.setConstraint((NamedConstraint) this.constraints.get(0)); - return obj; - } -} - +/** + * Copyright 2017 Goldman Sachs. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package com.gs.obevo.db.sqlparser.syntaxparser; + +import java.io.PrintStream; +import java.util.ArrayList; +import java.util.List; +import java.util.Stack; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class UnparseVisitor implements SqlParserVisitor { + private static final Logger LOG = LoggerFactory.getLogger(UnparseVisitor.class); + + private final PrintStream out; + private CreateIndex createIndex; + private AlterTableDrop alterTableDrop; + + public UnparseVisitor(PrintStream o) { + this.out = o; + } + + private static final boolean debug = false; + + private Object print(SimpleNode node, Object data) { + if (debug) { + this.out.println(">>>> Calling print from " + node.getClass() + " ***"); + } + Token t1 = node.getFirstToken(); + Token t = new Token(); + t.next = t1; + + SimpleNode n; + for (int ord = 0; ord < node.jjtGetNumChildren(); ord++) { + n = (SimpleNode) node.jjtGetChild(ord); + while (true) { + t = t.next; + if (t == n.getFirstToken()) { + break; + } + this.print(t); + } + n.jjtAccept(this, data); + t = n.getLastToken(); + } + + while (t != node.getLastToken()) { + t = t.next; + this.print(t); + } + if (debug) { + this.out.println("<< Exiting print for " + node.getClass() + " ***"); + } + return data; + } + + private void print(Token t) { + Token tt = t.specialToken; + if (tt != null) { + while (tt.specialToken != null) { + tt = tt.specialToken; + } + while (tt != null) { + // To preserve the whitespace + switch (tt.kind) { + case SqlParserConstants.FORMAL_COMMENT: + case SqlParserConstants.MULTI_LINE_COMMENT: + case SqlParserConstants.SINGLE_LINE_COMMENT: + break; + default: + this.currentSb().append(this.addUnicodeEscapes(tt.image)); + } + tt = tt.next; + } + } + + // To remove comments from the output + switch (t.kind) { + case SqlParserConstants.FORMAL_COMMENT: + case SqlParserConstants.MULTI_LINE_COMMENT: + case SqlParserConstants.SINGLE_LINE_COMMENT: + case SqlParserConstants.IN_FORMAL_COMMENT: + case 
SqlParserConstants.IN_MULTI_LINE_COMMENT: + case SqlParserConstants.IN_SINGLE_LINE_COMMENT: + break; + default: + this.currentSb().append(this.addUnicodeEscapes(t.image)); + } + + if (debug) { + this.out.print("\n"); // DEVELOPER NOTE - this line was added after the code generation by Obevo developers + } + } + + private StringBuilder currentSb() { + if (this.sbs.isEmpty()) { + return new StringBuilder(); // essentially writing to null + } else { + return this.sbs.peek(); + } + } + + private String addUnicodeEscapes(String str) { + String retval = ""; + char ch; + for (int i = 0; i < str.length(); i++) { + ch = str.charAt(i); + if ((ch < 0x20 || ch > 0x7e) && + ch != '\t' && ch != '\n' && ch != '\r' && ch != '\f') { + String s = "0000" + Integer.toString(ch, 16); + retval += "\\u" + s.substring(s.length() - 4, s.length()); + } else { + retval += ch; + } + } + return retval; + } + + @Override + public Object visit(SimpleNode node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTCompilationUnit node, Object data) { + return this.print(node, data); + } + + private CreateTable createTable; + + public CreateTable getCreateTable() { + return this.createTable; + } + + private CreateTableColumn createTableColumn; + + @Override + public Object visit(ASTCreateTable node, Object data) { + this.createTable = new CreateTable(); + Object obj = this.print(node, data); + this.createTable.setName(this.tableName); + this.createTable.setPostTableCreateText(this.postCreateObjectClauses); + this.createTable.getConstraints().addAll(this.constraints); + return obj; + } + + private String tableName = null; + + @Override + public Object visit(ASTTableName node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.tableName = sb.toString().trim(); + return obj; + } + + private String dropObjectName = null; + + @Override + public Object visit(ASTDropObjectName node, 
Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.dropObjectName = sb.toString().trim(); + return obj; + } + + @Override + public Object visit(ASTCreateTableColumnList node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTCreateTableColumn node, Object data) { + this.createTableColumn = new CreateTableColumn(); + Object obj = this.print(node, data); + this.createTableColumn.setName(this.columnName); + this.createTableColumn.setPostColumnText(this.postColumnClauses); + + this.createTable.getColumns().add(this.createTableColumn); + this.createTableColumn = null; + this.postColumnClauses = null; + this.columnName = null; + return obj; + } + + @Override + public Object visit(ASTDataType node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + LOG.trace("We only pop the string off the stack; will not use the ASTDataType string: {}", sb); + this.createTableColumn.getType().setTypeName(this.dataTypeName); + return obj; + } + + @Override + public Object visit(ASTDataTypeLenList node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.createTableColumn.getType().setTypeParams(sb.toString().trim()); + return obj; + } + + private final Stack sbs = new Stack(); + + private String columnName; + + @Override + public Object visit(ASTColumnName node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.columnName = sb.toString().trim(); + return obj; + } + + private String dataTypeName; + + @Override + public Object visit(ASTDataTypeName node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.dataTypeName = 
sb.toString().trim(); + return obj; + } + + @Override + public Object visit(ASTCreateStatement node, Object data) { + return this.print(node, data); + } + + public CreateIndex getCreateIndex() { + return this.createIndex; + } + + @Override + public Object visit(ASTCreateIndex node, Object data) { + this.createIndex = new CreateIndex(); + Object obj = this.print(node, data); + this.createIndex.setName(this.indexName); + this.createIndex.setUnique(this.uniqueIndex); + this.createIndex.setClusterClause(this.clusterClause); + this.createIndex.setIndexQualifier(this.indexQualifier); + this.createIndex.setTableName(this.tableName); + this.createIndex.setColumns(this.indexColumnList); + this.createIndex.setPostCreateObjectClauses(this.postCreateObjectClauses); + return obj; + } + + private boolean uniqueIndex; + + @Override + public Object visit(ASTUnique node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + LOG.trace("We only pop the string off the stack; will not use the ASTUnique string: {}", sb); + this.uniqueIndex = true; + return obj; + } + + private String indexName; + + @Override + public Object visit(ASTIndexName node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.indexName = sb.toString().trim(); + return obj; + } + + private String indexQualifier; + + @Override + public Object visit(ASTIndexQualifier node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.indexQualifier = sb.toString().trim(); + return obj; + } + + private String clusterClause; + + @Override + public Object visit(ASTClusterClause node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.clusterClause = sb.toString().trim(); + return obj; + } + + @Override + 
public Object visit(ASTCreateTableEnd node, Object data) { + return this.print(node, data); + } + + private String postCreateObjectClauses; + + @Override + public Object visit(ASTPostObjectTableClauses node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.postCreateObjectClauses = sb.toString().trim(); + return obj; + } + + private String indexColumnList; + + @Override + public Object visit(ASTIndexColumnList node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.indexColumnList = sb.toString().trim(); + return obj; + } + + private String postConstraintClauses; + + @Override + public Object visit(ASTPostConstraintClauses node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.postConstraintClauses = sb.toString().trim(); + return obj; + } + + private String postColumnClauses; + + @Override + public Object visit(ASTPostColumnClauses node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.postColumnClauses = sb.toString().trim(); + return obj; + } + + @Override + public Object visit(ASTIdentifierName node, Object data) { + return this.print(node, data); + } + + private String constraintName; + + @Override + public Object visit(ASTConstraintName node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.constraintName = sb.toString().trim(); + return obj; + } + + private String objectType; + + @Override + public Object visit(ASTDropObjectType node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + this.objectType = sb.toString().trim(); + return obj; + } + + private 
DropStatement dropStatement; + + @Override + public Object visit(ASTDropStatement node, Object data) { + Object obj = this.print(node, data); + this.dropStatement = new DropStatement(); + this.dropStatement.setObjectType(this.objectType); + this.dropStatement.setObjectName(this.dropObjectName); + this.dropStatement.setPostDropClauses(this.postCreateObjectClauses); + return obj; + } + + public DropStatement getDropStatement() { + return this.dropStatement; + } + + private final List constraints = new ArrayList(); + + @Override + public Object visit(ASTConstraintClause node, Object data) { + Object obj = this.print(node, data); + NamedConstraint constraint = new NamedConstraint(); + constraint.setName(this.constraintName); + + Constraint currentConstraint = this.constraints.get(this.constraints.size() - 1); + if (currentConstraint.getRawText() == null) { + constraint.setPostObjectClauses(currentConstraint.getPostObjectClauses()); + constraint.setColumns(currentConstraint.getColumns()); + constraint.setClusteredClause(currentConstraint.getClusteredClause()); + constraint.setType(currentConstraint.getType()); + } else { + constraint.setRawText(currentConstraint.getRawText()); + } + + this.constraints.set(this.constraints.size() - 1, constraint); + return obj; + } + + @Override + public Object visit(ASTPrimaryKeyClause node, Object data) { + this.clusterClause = null; + this.indexColumnList = null; + this.postCreateObjectClauses = null; + Object obj = this.print(node, data); + Constraint currentConstraint = new Constraint(); + currentConstraint.setType("PRIMARY KEY"); + currentConstraint.setClusteredClause(this.clusterClause); + currentConstraint.setColumns(this.indexColumnList); + currentConstraint.setPostObjectClauses(this.postConstraintClauses); + + this.constraints.add(currentConstraint); + return obj; + } + + @Override + public Object visit(ASTUniqueClause node, Object data) { + this.clusterClause = null; + this.indexColumnList = null; + 
this.postCreateObjectClauses = null; + Object obj = this.print(node, data); + Constraint currentConstraint = new Constraint(); + currentConstraint.setType("UNIQUE"); + currentConstraint.setClusteredClause(this.clusterClause); + currentConstraint.setColumns(this.indexColumnList); + currentConstraint.setPostObjectClauses(this.postConstraintClauses); + + this.constraints.add(currentConstraint); + return obj; + } + + @Override + public Object visit(ASTOtherConstraintClause node, Object data) { + this.sbs.push(new StringBuilder()); + Object obj = this.print(node, data); + StringBuilder sb = this.sbs.pop(); + + Constraint currentConstraint = new Constraint(); + currentConstraint.setRawText(sb.toString().trim()); + + this.constraints.add(currentConstraint); + return obj; + } + + @Override + public Object visit(ASTExpression node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTSimpleExpression node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTExpressionList node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTNestedExpression node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTNestedExpressionList node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTNoCommaSimpleExpression node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTNoCommaExpressionList node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTNoCommaExpression node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTNoCommaNestedExpression node, Object data) { + return this.print(node, data); + } + + @Override + public Object visit(ASTNoCommaNestedExpressionList node, Object data) { + return this.print(node, data); + } + + private AlterTableAdd alterTableAdd; + + public 
AlterTableAdd getAlterTableAdd() { + return this.alterTableAdd; + } + + @Override + public Object visit(ASTAlterStatement node, Object data) { + Object obj = this.print(node, data); + if (this.alterTableAdd == null) { + this.alterTableDrop = new AlterTableDrop(); + this.alterTableDrop.setTableName(this.tableName); + this.alterTableDrop.setDropStatement(this.dropStatement); + } + // the rest is processed in ASTAlterTableAdd - may refactor at some point + return obj; + } + + public AlterTableDrop getAlterTableDrop() { + return this.alterTableDrop; + } + + @Override + public Object visit(ASTAlterTableAdd node, Object data) { + this.alterTableAdd = new AlterTableAdd(); + Object obj = this.print(node, data); + this.alterTableAdd.setTableName(this.tableName); + this.alterTableAdd.setConstraint((NamedConstraint) this.constraints.get(0)); + return obj; + } +} + diff --git a/obevo-mongodb/src/main/java/com/gs/obevo/mongodb/impl/MongoDbChangeAuditDao.java b/obevo-mongodb/src/main/java/com/gs/obevo/mongodb/impl/MongoDbChangeAuditDao.java index bc4c9813..cee4b90e 100644 --- a/obevo-mongodb/src/main/java/com/gs/obevo/mongodb/impl/MongoDbChangeAuditDao.java +++ b/obevo-mongodb/src/main/java/com/gs/obevo/mongodb/impl/MongoDbChangeAuditDao.java @@ -24,8 +24,10 @@ import com.gs.obevo.api.appdata.ChangeRerunnable; import com.gs.obevo.api.appdata.DeployExecution; import com.gs.obevo.api.appdata.PhysicalSchema; +import com.gs.obevo.api.platform.AuditLock; import com.gs.obevo.api.platform.ChangeAuditDao; import com.gs.obevo.api.platform.Platform; +import com.gs.obevo.impl.changeauditdao.InMemLock; import com.gs.obevo.mongodb.api.appdata.MongoDbEnvironment; import com.gs.obevo.util.knex.InternMap; import com.mongodb.MongoClient; @@ -212,4 +214,9 @@ public void deleteObjectChanges(Change change) { Filters.eq("OBJECTNAME", change.getObjectName()) ); } + + @Override + public AuditLock acquireLock() { + return new InMemLock(); + } } diff --git 
a/obevo-site/src/site/markdown/app-versioning.md b/obevo-site/src/site/markdown/app-versioning.md new file mode 100644 index 00000000..82543d0c --- /dev/null +++ b/obevo-site/src/site/markdown/app-versioning.md @@ -0,0 +1,162 @@ + + +# Considerations on Users Applying Versions to their Deployments + +Obevo's deployment algorithm (described in detail [here](https://github.com/goldmansachs/obevo/blob/master/Obevo_Javasig.pdf)) +does not require users to consider their code bases using application versions (e.g. tagging a binary using a semantic +versioning scheme like X.Y.Z, and determining deploy actions based on that). The algorithm is agnostic to that; Obevo +simply looks at a codebase (consisting of incremental and rerunnable changes) to deploy, and calculates the migration to +perform based on the difference of what is in the deploy log. It does not matter where the code base comes from, whether +from a versioned package or unnamed archive or files on the file system. + +However, users do have the option to associate their own version numbers on packages deployed via Obevo, and Obevo has +some features that are enabled when this is used. This page will go into more depth on this. + + +## Brief Review of Deploy Algorithm + +Obevo works by tracking deployed _changes_ in a deploy log, and only incrementally deploying deltas to the target +environment.
+ +For example: + +Deploy V1: +* Environment Audit Log shows no changes deployed +* Source Codebase has three changes (INC1, RR2, RR3) +* Obevo calculates the changeset difference as (INC1, RR2, RR3) and thus attempts to deploy each of those +* Upon successful deployment of an individual change, it is recorded to the Environment Audit Log + +Deploy V2: +* Environment Audit Log shows three changes (INC1, RR2, RR3) deployed +* Source Codebase has three changes (INC1, RR2', INC4), with RR2' modified from the original RR2 +* To summarize the changeset: + * INC1 remains unchanged, and so that is not included in the changeset + * RR2' is modified, and so it is included in the changeset + * RR3 is removed, and so it is added to the changeset as a deletion + * INC4 is added, and so it is added to the changeset as an addition + +How adds/drops/modifications are handled depends on the object type: +* Incremental changes (i.e. for tables) do not allow modifications and removals of deployments, except for rebaseline use cases +* Rerunnable objects, in contrast, can be modified and dropped +* Both use cases support simple additions + +In the example above, the incremental changes are named with the "INC" prefix, likewise for rerunnable changes and the "RR" +prefix. We will use this example for the following sections. + + +## Implications on rollback + +### Situation without application versioning + +For regular deployments, note that only _additions_ of incremental changes are allowed, not removals. Thus, for typical +releases, new incremental changes tend to be added (e.g. Deploy V1 to Deploy V2) + +However, consider if a rollback situation arises (e.g. needing to re-deploy V1 after having deployed V2). 
In that case, +the deployment would appear as follows: + +Re-Deploy V1 from V2: +* Environment Audit Log shows three changes (INC1, RR2', INC4) deployed +* Source Codebase has three changes (INC1, RR2, RR3), with RR2 modified from the original RR2' +* To summarize the changeset: + * INC1 remains unchanged, and so that is not included in the changeset + * RR2 is modified, and so it is included in the changeset + * RR3 is added, and so it is added to the changeset + * INC4 is removed, and so it is added to the changeset as a deletion + +Note that the INC4 change now appears as a deletion. Normally, this change would not be allowed as this is an incremental +change. In Obevo, users can signal that a deployment is intended as a rollback using the -rollback option (see the +[Rollback Documentation](rollback.md) for more details). + +However, users may not always have the flexibility to change the deploy command to include -rollback based on the kind +of deployment tooling they have. We now detail how Obevo's application versioning functionality can handle this case. + +### How Obevo's application versioning functionality can help + +In the example above, from a human perspective, it seems obvious that V2 is a later version than V1, and that going from +V2 to V1 should be a rollback. How can we tell Obevo to do this? + + +1) Specify the -productVersion \<version\> attribute in your deploy call to have the version number stored + +``` +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion 1.2.3 +``` + +This is an optional parameter that will store the version value as a new field in the DEPLOYEXECUTION audit table. 
+ +2) Using this field, Obevo will know that a version deployment is a rollback if: + +* A) The version deployed had previously been deployed +* B) There was another version deployed since the original version + +For example: + +``` +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion 1.0.0 # considered a regular forward deploy +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion 1.0.0 # considered a regular forward deploy (no-op in this case) +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion 2.0.0 # considered a regular forward deploy +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion 1.0.0 # rollback deploy +``` + +How does Obevo do this? By simply checking for the existence of the productVersion in the DEPLOYEXECUTION table. + +Note that the actual version naming scheme does not matter; the check is based purely on the values of the DEPLOYEXECUTION +table. + +For example: + +``` +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion myVersionA # considered a regular forward deploy +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion myVersionB # considered a regular forward deploy +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion myVersionA # rollback deploy +``` + +Given that, note that this requires deploying a previously-named version as is. As it stands today, Obevo will not +understand semantic versioning and will not automatically consider the deployment as a rollback. + +``` +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion 1.0.0 # considered a regular forward deploy +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion 2.0.0 # considered a regular forward deploy +deploy.sh DEPLOY -sourcePath /my/source/path -productVersion 1.1.0 # considered a regular forward deploy (even though version number is lower) +``` + +(It would be possible to have Obevo consider semantic versioning for rollbacks. This would have to be an opt-in argument +for users. 
It would not be a large build, but needs prioritization. Please raise a GitHub issue to inquire more) + + +### Handling Branch Releases + +Work in progress + +Starting use case: +* you deploy v1.0.0 with changes A1, B1, C1 +* then you are working on v2.0.0 in development, say with changes A1, B1, C1, D1, E1 in total + +Now say we need to release a fix in production, say to add a change A2 +* you deploy v1.0.0 with changes A1, B1, C1, A2 + +What happens the next time you deploy v2.0.0 on that database? + +If you have not incorporated A2 into your v2.0.0 branch, then the next deploy of v2.0.0 will attempt to remove A2, which +may not be correct. Hence, any time you do a release off the non-master branch, please ensure that all merges on the +release branch are merged into the master branch. + +For example: +* In v2.0.0, add A2, such as: A1, B1, C1, A2, D1, E1 + diff --git a/obevo-site/src/site/markdown/dev-setup-sybase-ase.md b/obevo-site/src/site/markdown/dev-setup-sybase-ase.md index 19694962..a9887a3d 100644 --- a/obevo-site/src/site/markdown/dev-setup-sybase-ase.md +++ b/obevo-site/src/site/markdown/dev-setup-sybase-ase.md @@ -79,7 +79,7 @@ mkdir /opt/sap/install cd /opt/sap/install cp ~/host/ase_suite.tar . -tar -xf /home/shantstepanian/host/ase_suite.tar +tar -xf /home/myuser/host/ase_suite.tar ./setup.bin