Extract common const PARAM_PREFIX (#2874)
ChengJie1053 authored on Jul 21, 2023
Commit: 5e0ce99 (parent: 3b66622)
Showing 5 changed files with 14 additions and 11 deletions.
ConfigConst (org.apache.streampark.common.conf)

@@ -24,6 +24,8 @@ object ConfigConst {
 
   val DEFAULT_DATAMASK_STRING = "********"
 
+  val PARAM_PREFIX = "--"
+
   /** about parameter... */
 
   val KEY_APP_HOME = "app.home"
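
The key helpers in ConfigConst are called both with a prefix (KEY_FLINK_SQL(PARAM_PREFIX) below) and without (KEY_FLINK_SQL() in the test file), so they are presumably functions from an optional prefix to the full key string. A minimal sketch of that assumed pattern; the helper body and the "flink.sql" key value are illustrative, not taken from the source:

object ConfigConstSketch {
  val PARAM_PREFIX = "--"

  // Assumed shape of the helper: the prefix defaults to empty, so one definition
  // yields "flink.sql" as a config key and "--flink.sql" as a CLI flag.
  def KEY_FLINK_SQL(prefix: String = ""): String = s"${prefix}flink.sql"

  def main(args: Array[String]): Unit = {
    println(KEY_FLINK_SQL())             // flink.sql
    println(KEY_FLINK_SQL(PARAM_PREFIX)) // --flink.sql
  }
}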

FlinkClientTrait (flink client)

@@ -52,11 +52,11 @@ import scala.util.{Failure, Success, Try}
 
 trait FlinkClientTrait extends Logger {
 
-  private[client] lazy val PARAM_KEY_FLINK_CONF = KEY_FLINK_CONF("--")
-  private[client] lazy val PARAM_KEY_FLINK_SQL = KEY_FLINK_SQL("--")
-  private[client] lazy val PARAM_KEY_APP_CONF = KEY_APP_CONF("--")
-  private[client] lazy val PARAM_KEY_APP_NAME = KEY_APP_NAME("--")
-  private[client] lazy val PARAM_KEY_FLINK_PARALLELISM = KEY_FLINK_PARALLELISM("--")
+  private[client] lazy val PARAM_KEY_FLINK_CONF = KEY_FLINK_CONF(PARAM_PREFIX)
+  private[client] lazy val PARAM_KEY_FLINK_SQL = KEY_FLINK_SQL(PARAM_PREFIX)
+  private[client] lazy val PARAM_KEY_APP_CONF = KEY_APP_CONF(PARAM_PREFIX)
+  private[client] lazy val PARAM_KEY_APP_NAME = KEY_APP_NAME(PARAM_PREFIX)
+  private[client] lazy val PARAM_KEY_FLINK_PARALLELISM = KEY_FLINK_PARALLELISM(PARAM_PREFIX)
 
   @throws[Exception]
   def submit(submitRequest: SubmitRequest): SubmitResponse = {
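
These PARAM_KEY_* values are flag-style strings passed to the Flink program as main-method arguments; on the receiving side, flags in this "--key value" shape can be parsed back with Flink's ParameterTool. A hedged sketch of that consumer side — the key name is an assumption for illustration, and StreamPark's actual plumbing goes through FlinkTableInitializer, as the test file below shows:

import org.apache.flink.api.java.utils.ParameterTool

object ReadArgsSketch {
  def main(args: Array[String]): Unit = {
    // Expects args in the flag form built above, e.g. --flink.sql <zipped sql>
    // ("flink.sql" as the key name is assumed for this sketch).
    val params = ParameterTool.fromArgs(args)
    val zippedSql = params.get("flink.sql")
    println(s"received flink.sql = $zippedSql")
  }
}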

SqlSplitter (org.apache.streampark.flink.core)

@@ -16,6 +16,7 @@
  */
 package org.apache.streampark.flink.core
 
+import org.apache.streampark.common.conf.ConfigConst.PARAM_PREFIX
 import org.apache.streampark.common.enums.FlinkSqlValidationFailedType
 import org.apache.streampark.common.util.Logger
 
@@ -417,7 +418,7 @@ case class SqlSegment(start: Int, end: Int, sql: String)
 
 object SqlSplitter {
 
-  private lazy val singleLineCommentPrefixList = Set[String]("--")
+  private lazy val singleLineCommentPrefixList = Set[String](PARAM_PREFIX)
 
   /**
    * Split whole text into multiple sql statements. Two Steps: Step 1, split the whole text into
@@ -453,7 +454,7 @@ object SqlSplitter {
       while (scanner.hasNextLine) {
        lineNumber += 1
        val line = scanner.nextLine().trim
-        val nonEmpty = line.nonEmpty && !line.startsWith("--")
+        val nonEmpty = line.nonEmpty && !line.startsWith(PARAM_PREFIX)
        if (line.startsWith("/*")) {
          startComment = true
          hasComment = true
@@ -618,7 +619,7 @@
     builder.toString
   }
 
-  private[this] def isSingleLineComment(text: String) = text.trim.startsWith("--")
+  private[this] def isSingleLineComment(text: String) = text.trim.startsWith(PARAM_PREFIX)
 
   private[this] def isMultipleLineComment(text: String) =
     text.trim.startsWith("/*") && text.trim.endsWith("*/")
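
One nuance in this file: here "--" is SQL's single-line comment marker, which only happens to share its literal value with the CLI flag prefix. A standalone sketch of the comment check as now written, with the helper re-declared locally so the snippet runs on its own:

object CommentCheckSketch {
  // Shared literal: CLI flag prefix in ConfigConst, line-comment marker here.
  val PARAM_PREFIX = "--"

  // Local re-declaration of the check shown in the diff above.
  def isSingleLineComment(text: String): Boolean = text.trim.startsWith(PARAM_PREFIX)

  def main(args: Array[String]): Unit = {
    println(isSingleLineComment("-- a comment"))         // true
    println(isSingleLineComment("SELECT 1 -- trailing")) // false: prefix check only
  }
}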

FlinkSqlExecuteFunSuite (org.apache.streampark.flink.core.test)

@@ -16,7 +16,7 @@
  */
 package org.apache.streampark.flink.core.test
 
-import org.apache.streampark.common.conf.ConfigConst.KEY_FLINK_SQL
+import org.apache.streampark.common.conf.ConfigConst.{KEY_FLINK_SQL, PARAM_PREFIX}
 import org.apache.streampark.common.util.DeflaterUtils
 import org.apache.streampark.flink.core.{FlinkSqlExecutor, FlinkTableInitializer, StreamTableContext}
 
@@ -28,7 +28,7 @@ import scala.collection.mutable.ArrayBuffer
 
 class FlinkSqlExecuteFunSuite extends AnyFunSuite {
 
   def execute(sql: String)(implicit func: String => Unit): Unit = {
-    val args = ArrayBuffer(KEY_FLINK_SQL("--"), DeflaterUtils.zipString(sql.stripMargin))
+    val args = ArrayBuffer(KEY_FLINK_SQL(PARAM_PREFIX), DeflaterUtils.zipString(sql.stripMargin))
     val context = new StreamTableContext(FlinkTableInitializer.initialize(args.toArray, null, null))
     FlinkSqlExecutor.executeSql(KEY_FLINK_SQL(), context.parameter, context)
   }

Spark (trait)

@@ -101,7 +101,7 @@ trait Spark extends Logger {
         createOnError = value.toBoolean
         argv = tail
       case Nil =>
-      case other :: value :: tail if other.startsWith("--") =>
+      case other :: value :: tail if other.startsWith(PARAM_PREFIX) =>
         userArgs += other.drop(2) -> value
         argv = tail
       case tail =>
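
The case shown consumes arguments in "--key value" pairs, with other.drop(2) stripping the two-character prefix. A condensed, self-contained sketch of that loop, simplified from the trait; the sample arguments are made up:

import scala.collection.mutable

object ArgParseSketch {
  def main(args: Array[String]): Unit = {
    val PARAM_PREFIX = "--"
    val userArgs = mutable.Map[String, String]()

    // Illustrative input in the "--key value" shape the match above consumes.
    var argv: List[String] = List("--spark.app.name", "demo", "--checkpoint", "true")
    while (argv.nonEmpty) {
      argv = argv match {
        case key :: value :: tail if key.startsWith(PARAM_PREFIX) =>
          // The trait uses drop(2); drop(PARAM_PREFIX.length) is equivalent
          // while the prefix stays "--".
          userArgs += key.drop(PARAM_PREFIX.length) -> value
          tail
        case _ :: tail => tail
        case Nil => Nil
      }
    }
    println(userArgs) // contains spark.app.name -> demo and checkpoint -> true
  }
}

Using PARAM_PREFIX.length instead of the literal 2 would keep the drop in sync if the prefix ever changed; the trait itself keeps drop(2).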
