Small improvements that help debugging SQL query differences
Leonard Wolters committed May 28, 2024
1 parent b444e06 commit 5d8c0ad
Showing 4 changed files with 28 additions and 11 deletions.
@@ -9,9 +9,9 @@ trait AggregationFunctionTokenizer { this: ClickhouseTokenizerModule =>
   case nested: CombinedAggregatedFunction[_, _] =>
     val tokenizedCombinators = collectCombinators(nested).map(tokenizeCombinator)
     val combinators = tokenizedCombinators.map(_._1).mkString("")
-    val combinatorsValues = tokenizedCombinators.flatMap(_._2).mkString(",")
+    val combinatorsValues = tokenizedCombinators.flatMap(_._2).mkString(ctx.vDelim)
     val (function, values) = tokenizeInnerAggregatedFunction(extractTarget(nested))
-    val separator = if (values.isEmpty || combinatorsValues.isEmpty) "" else ","
+    val separator = if (values.isEmpty || combinatorsValues.isEmpty) "" else ctx.fDelim
     s"$function$combinators($values$separator$combinatorsValues)"
   case timeSeries: TimeSeries => tokenizeTimeSeries(timeSeries)
   case aggregated: AggregateFunction[_] =>
@@ -45,20 +45,25 @@ trait AggregationFunctionTokenizer { this: ClickhouseTokenizerModule =>
   case LastValue(column) => ("last_value", tokenizeColumn(column))
   case Median(column, level, modifier) =>
     val (modifierName, modifierValue) = tokenizeLevelModifier(modifier)
-    (s"median$modifierName", s"$level)(${tokenizeColumn(column)}${modifierValue.map("," + _).getOrElse("")}")
+    (s"median$modifierName", s"$level)(${tokenizeColumn(column)}${modifierValue.map(ctx.vDelim + _).getOrElse("")}")
   case Min(tableColumn) => ("min", tokenizeColumn(tableColumn))
   case Max(tableColumn) => ("max", tokenizeColumn(tableColumn))
   case Quantile(column, level, modifier) =>
     val (modifierName, modifierValue) = tokenizeLevelModifier(modifier)
-    (s"quantile$modifierName", s"$level)(${tokenizeColumn(column)}${modifierValue.map("," + _).getOrElse("")})")
+    (
+      s"quantile$modifierName",
+      s"$level)(${tokenizeColumn(column)}${modifierValue.map(ctx.vDelim + _).getOrElse("")})"
+    )
   case Quantiles(column, levels, modifier) =>
     val (modifierName, modifierValue) = tokenizeLevelModifier(modifier)
-    (s"quantiles$modifierName",
-     s"${levels.mkString(",")})(${tokenizeColumn(column)}${modifierValue.map("," + _).getOrElse("")}")
+    (
+      s"quantiles$modifierName",
+      s"${levels.mkString(ctx.vDelim)})(${tokenizeColumn(column)}${modifierValue.map(ctx.vDelim + _).getOrElse("")}"
+    )
   case Sum(column, modifier) => (s"sum${tokenizeSumModifier(modifier)}", tokenizeColumn(column))
   case SumMap(key, value) => (s"sumMap", tokenizeColumns(Seq(key, value)))
   case Uniq(columns, modifier) =>
-    (s"uniq${tokenizeUniqModifier(modifier)}", columns.map(tokenizeColumn).mkString(","))
+    (s"uniq${tokenizeUniqModifier(modifier)}", columns.map(tokenizeColumn).mkString(ctx.vDelim))
   case f: AggregateFunction[_] =>
     throw new IllegalArgumentException(s"Cannot use $f aggregated function with combinator")
 }
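The hunk above assembles a combined aggregate call from four pieces: the inner function name, the concatenated combinator suffixes, the inner function's values, and the combinator values joined with `ctx.vDelim`; `ctx.fDelim` is inserted between the two value groups only when both are non-empty. A minimal, self-contained sketch of that assembly, using simplified stand-ins (hypothetical names, not the library's API):

```scala
// Sketch of the assembly logic in the hunk above (simplified stand-ins for
// TokenizeContext and the combinator pipeline; not the library's API).
object CombinatorAssemblySketch extends App {
  case class Ctx(fDelim: String = ", ", vDelim: String = ",")

  def assemble(
      function: String,
      combinators: Seq[String],
      values: String,
      combinatorValues: Seq[String]
  )(implicit ctx: Ctx): String = {
    val combinatorsValues = combinatorValues.mkString(ctx.vDelim)
    // fDelim only separates the two groups when both are non-empty,
    // so we never emit "f(, x)" or "f(x, )".
    val separator = if (values.isEmpty || combinatorsValues.isEmpty) "" else ctx.fDelim
    s"$function${combinators.mkString("")}($values$separator$combinatorsValues)"
  }

  implicit val ctx: Ctx = Ctx()
  println(assemble("uniq", Seq("If"), "clicks", Seq("banner_id > 0")))
  // prints: uniqIf(clicks, banner_id > 0)
}
```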
@@ -15,7 +15,8 @@ case class TokenizeContext(
   var joinNr: Int = 0,
   var tableAliases: Map[Table, String] = Map.empty,
   var useTableAlias: Boolean = false,
-  delim: String = ", "
+  fDelim: String = ", ", // function delimiter
+  vDelim: String = ","   // values delimiter
 ) {

   def incrementJoinNumber(): Unit = joinNr += 1
@@ -79,7 +80,7 @@ trait ClickhouseTokenizerModule
 }

 protected def tokenizeSeqCol(columns: Column*)(implicit ctx: TokenizeContext): String =
-  columns.map(tokenizeColumn).mkString(", ")
+  columns.map(tokenizeColumn).mkString(",")

 override def toSql(query: InternalQuery, formatting: Option[String] = Some("JSON"))(implicit
   ctx: TokenizeContext
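These two hunks split the single `delim` into `fDelim` (between function arguments, with a trailing space) and `vDelim` (between values in a list, no space); `tokenizeSeqCol` now joins column sequences with the same compact comma as the `vDelim` default. Per the commit message, giving each role a fixed, distinct delimiter presumably makes textual differences between expected and generated SQL easier to spot. A sketch of the split, again with hypothetical stand-ins rather than the real `TokenizeContext`:

```scala
// Sketch of the two-delimiter split: fDelim separates function arguments,
// vDelim separates values inside a list such as quantile levels.
// CtxSketch is a hypothetical stand-in, not the library's TokenizeContext.
case class CtxSketch(
    fDelim: String = ", ", // between function arguments
    vDelim: String = ","   // between values in a list
)

object CtxSketchDemo extends App {
  val ctx = CtxSketch()
  val levels = Seq(0.25, 0.5, 0.75).mkString(ctx.vDelim)       // "0.25,0.5,0.75"
  val args   = Seq("price", "active = 1").mkString(ctx.fDelim) // "price, active = 1"
  println(s"quantilesIf($levels)($args)")
  // prints: quantilesIf(0.25,0.5,0.75)(price, active = 1)
}
```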
@@ -24,9 +24,9 @@ trait StringSearchFunctionTokenizer {
   }

   val maybeReplaceParam = col match {
-    case r: StringSearchReplaceFunc => ctx.delim + tokenizeColumn(r.replace.column)
+    case r: StringSearchReplaceFunc => ctx.fDelim + tokenizeColumn(r.replace.column)
     case _ => ""
   }
-  s"$command(${tokenizeColumn(col.col1.column)}${ctx.delim}${tokenizeColumn(col.col2.column)}$maybeReplaceParam)"
+  s"$command(${tokenizeColumn(col.col1.column)}${ctx.fDelim}${tokenizeColumn(col.col2.column)}$maybeReplaceParam)"
  }
 }
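String-search functions render as `command(col1, col2[, replacement])`: the two column arguments are joined by the function delimiter, and the optional replacement argument is appended with the same delimiter. A hedged, self-contained sketch of that rendering (hypothetical helper, not the library's API):

```scala
// Sketch of the string-search rendering in the hunk above: two column
// arguments joined by fDelim, plus an optional replacement argument
// appended with the same delimiter (simplified stand-ins).
object StringSearchSketch extends App {
  val fDelim = ", "

  def render(command: String, col1: String, col2: String, replace: Option[String]): String = {
    val maybeReplaceParam = replace.map(fDelim + _).getOrElse("")
    s"$command($col1$fDelim$col2$maybeReplaceParam)"
  }

  println(render("match", "'abcd'", "','", None))           // match('abcd', ',')
  println(render("replaceAll", "name", "'a'", Some("'b'"))) // replaceAll(name, 'a', 'b')
}
```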
@@ -0,0 +1,11 @@
+package com.crobox.clickhouse.dsl.language
+
+import com.crobox.clickhouse.DslTestSpec
+import com.crobox.clickhouse.dsl._
+
+class StringSearchFunctionTokenizerTest extends DslTestSpec {
+
+  it should "strMatch" in {
+    toSQL(select(strMatch("abcd", ",")), false) should matchSQL("SELECT match('abcd', ',')")
+  }
+}
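The new test deliberately uses a comma as the match pattern: the comma inside the string literal is data and must pass through tokenization unchanged, while the comma between `match`'s two arguments comes from the function delimiter. A standalone sketch of that distinction (hypothetical helper, not the DSL's `strMatch`):

```scala
// Sketch of what the test above guards: the comma inside a string literal
// survives tokenization, while the comma between arguments is the fDelim.
// strMatchSql is a hypothetical helper, not the library's API.
object StrMatchSketch extends App {
  val fDelim = ", "

  def strMatchSql(haystack: String, pattern: String): String =
    s"match('$haystack'$fDelim'$pattern')"

  assert(strMatchSql("abcd", ",") == "match('abcd', ',')")
  println(strMatchSql("abcd", ",")) // match('abcd', ',')
}
```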
