diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/AggregationFunctionTokenizer.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/AggregationFunctionTokenizer.scala
index f9f7102c..fb51a44a 100644
--- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/AggregationFunctionTokenizer.scala
+++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/AggregationFunctionTokenizer.scala
@@ -9,9 +9,9 @@ trait AggregationFunctionTokenizer { this: ClickhouseTokenizerModule =>
       case nested: CombinedAggregatedFunction[_, _] =>
         val tokenizedCombinators = collectCombinators(nested).map(tokenizeCombinator)
         val combinators = tokenizedCombinators.map(_._1).mkString("")
-        val combinatorsValues = tokenizedCombinators.flatMap(_._2).mkString(",")
+        val combinatorsValues = tokenizedCombinators.flatMap(_._2).mkString(ctx.vDelim)
         val (function, values) = tokenizeInnerAggregatedFunction(extractTarget(nested))
-        val separator = if (values.isEmpty || combinatorsValues.isEmpty) "" else ","
+        val separator = if (values.isEmpty || combinatorsValues.isEmpty) "" else ctx.fDelim
         s"$function$combinators($values$separator$combinatorsValues)"
       case timeSeries: TimeSeries => tokenizeTimeSeries(timeSeries)
       case aggregated: AggregateFunction[_] =>
@@ -45,20 +45,25 @@ trait AggregationFunctionTokenizer { this: ClickhouseTokenizerModule =>
       case LastValue(column) => ("last_value", tokenizeColumn(column))
       case Median(column, level, modifier) =>
         val (modifierName, modifierValue) = tokenizeLevelModifier(modifier)
-        (s"median$modifierName", s"$level)(${tokenizeColumn(column)}${modifierValue.map("," + _).getOrElse("")}")
+        (s"median$modifierName", s"$level)(${tokenizeColumn(column)}${modifierValue.map(ctx.vDelim + _).getOrElse("")}")
       case Min(tableColumn) => ("min", tokenizeColumn(tableColumn))
       case Max(tableColumn) => ("max", tokenizeColumn(tableColumn))
       case Quantile(column, level, modifier) =>
         val (modifierName, modifierValue) = tokenizeLevelModifier(modifier)
-        (s"quantile$modifierName", s"$level)(${tokenizeColumn(column)}${modifierValue.map("," + _).getOrElse("")})")
+        (
+          s"quantile$modifierName",
+          s"$level)(${tokenizeColumn(column)}${modifierValue.map(ctx.vDelim + _).getOrElse("")})"
+        )
       case Quantiles(column, levels, modifier) =>
         val (modifierName, modifierValue) = tokenizeLevelModifier(modifier)
-        (s"quantiles$modifierName",
-         s"${levels.mkString(",")})(${tokenizeColumn(column)}${modifierValue.map("," + _).getOrElse("")}")
+        (
+          s"quantiles$modifierName",
+          s"${levels.mkString(ctx.vDelim)})(${tokenizeColumn(column)}${modifierValue.map(ctx.vDelim + _).getOrElse("")}"
+        )
       case Sum(column, modifier) => (s"sum${tokenizeSumModifier(modifier)}", tokenizeColumn(column))
       case SumMap(key, value) => (s"sumMap", tokenizeColumns(Seq(key, value)))
       case Uniq(columns, modifier) =>
-        (s"uniq${tokenizeUniqModifier(modifier)}", columns.map(tokenizeColumn).mkString(","))
+        (s"uniq${tokenizeUniqModifier(modifier)}", columns.map(tokenizeColumn).mkString(ctx.vDelim))
       case f: AggregateFunction[_] =>
         throw new IllegalArgumentException(s"Cannot use $f aggregated function with combinator")
     }
diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ClickhouseTokenizerModule.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ClickhouseTokenizerModule.scala
index aebb1a1c..33c69d53 100644
--- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ClickhouseTokenizerModule.scala
+++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/ClickhouseTokenizerModule.scala
@@ -15,7 +15,8 @@ case class TokenizeContext(
     var joinNr: Int = 0,
     var tableAliases: Map[Table, String] = Map.empty,
     var useTableAlias: Boolean = false,
-    delim: String = ", "
+    fDelim: String = ", ", // function delimiter
+    vDelim: String = "," // values delimiter
 ) {

   def incrementJoinNumber(): Unit = joinNr += 1
@@ -79,7 +80,7 @@ trait ClickhouseTokenizerModule
   }

   protected def tokenizeSeqCol(columns: Column*)(implicit ctx: TokenizeContext): String =
-    columns.map(tokenizeColumn).mkString(", ")
+    columns.map(tokenizeColumn).mkString(",")

   override def toSql(query: InternalQuery,
                     formatting: Option[String] = Some("JSON"))(implicit ctx: TokenizeContext
diff --git a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/StringSearchFunctionTokenizer.scala b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/StringSearchFunctionTokenizer.scala
index c23670e5..2f1f17f4 100644
--- a/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/StringSearchFunctionTokenizer.scala
+++ b/dsl/src/main/scala/com/crobox/clickhouse/dsl/language/StringSearchFunctionTokenizer.scala
@@ -24,9 +24,9 @@ trait StringSearchFunctionTokenizer {
     }

     val maybeReplaceParam = col match {
-      case r: StringSearchReplaceFunc => ctx.delim + tokenizeColumn(r.replace.column)
+      case r: StringSearchReplaceFunc => ctx.fDelim + tokenizeColumn(r.replace.column)
       case _ => ""
     }
-    s"$command(${tokenizeColumn(col.col1.column)}${ctx.delim}${tokenizeColumn(col.col2.column)}$maybeReplaceParam)"
+    s"$command(${tokenizeColumn(col.col1.column)}${ctx.fDelim}${tokenizeColumn(col.col2.column)}$maybeReplaceParam)"
   }
 }
diff --git a/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/StringSearchFunctionTokenizerTest.scala b/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/StringSearchFunctionTokenizerTest.scala
new file mode 100644
index 00000000..bc69c7cf
--- /dev/null
+++ b/dsl/src/test/scala/com/crobox/clickhouse/dsl/language/StringSearchFunctionTokenizerTest.scala
@@ -0,0 +1,11 @@
+package com.crobox.clickhouse.dsl.language
+
+import com.crobox.clickhouse.DslTestSpec
+import com.crobox.clickhouse.dsl._
+
+class StringSearchFunctionTokenizerTest extends DslTestSpec {
+
+  it should "strMatch" in {
+    toSQL(select(strMatch("abcd", ",")), false) should matchSQL("SELECT match('abcd', ',')")
+  }
+}