Commit

Rename
Leonard Wolters committed Nov 2, 2023
1 parent fa24eff · commit cf9d967
Showing 9 changed files with 43 additions and 43 deletions.
@@ -171,7 +171,7 @@ class ClickhouseTimeSeriesIT extends DslITSpec with TableDrivenPropertyChecks {
}

private def getEntries(multiInterval: MultiInterval, entriesId: UUID) =
- chExecutor.execute[CustomResult](
+ queryExecutor.execute[CustomResult](
select(count() as "shields", toUInt64(timeSeries(timestampColumn, multiInterval)) as alias)
.from(OneTestTable)
.groupBy(alias)
24 changes: 12 additions & 12 deletions dsl/src/it/scala/com/crobox/clickhouse/dsl/JoinQueryIT.scala
@@ -11,15 +11,15 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks {
.from(OneTestTable)
.where(notEmpty(itemId))
.join(InnerJoin, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) on itemId
- var resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ var resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe 0

// reverse tables to check other side of ON condition
query = select(itemId, col2)
.from(TwoTestTable)
.where(notEmpty(itemId))
.join(InnerJoin, select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId))) on itemId
- resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe 0
}

@@ -48,7 +48,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks {
.from(OneTestTable)
.where(notEmpty(itemId))
.join(joinType, TwoTestTable) using itemId
- var resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ var resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe result

// TABLE -- QUERY
@@ -57,7 +57,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks {
.from(OneTestTable)
.where(notEmpty(itemId))
.join(joinType, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) using itemId
- resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe result

// QUERY -- TABLE
@@ -68,15 +68,15 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks {
)
.join(joinType, TwoTestTable)
.where(notEmpty(itemId)) using itemId
- resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe result

// QUERY -- QUERY
query =
select(dsl.all())
.from(select(shieldId as itemId).from(OneTestTable).where(notEmpty(itemId)))
.join(joinType, select(itemId, col2).from(TwoTestTable).where(notEmpty(itemId))) using itemId
- resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe result
}
}
@@ -97,7 +97,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks {
.where(notEmpty(itemId))
.join(joinType, ThreeTestTable)
.on((itemId, "=", itemId), (col2, "<=", col2))
- var resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ var resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe result

// TABLE -- QUERY
@@ -106,7 +106,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks {
.where(notEmpty(itemId))
.join(joinType, select(itemId, col2).from(ThreeTestTable).where(notEmpty(itemId)))
.on((itemId, "=", itemId), (col2, "<=", col2))
- resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe result

// QUERY -- TABLE
@@ -115,7 +115,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks {
.join(joinType, ThreeTestTable)
.where(notEmpty(itemId))
.on((itemId, "=", itemId), (col2, "<=", col2))
- resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe result

// QUERY -- QUERY
@@ -124,7 +124,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks {
.join(joinType, select(itemId, col2).from(ThreeTestTable).where(notEmpty(itemId)))
.on((itemId, "=", itemId), (col2, "<=", col2))

- resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe result
}
}
@@ -133,7 +133,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks {
it should "correctly handle cross join" in {
val query: OperationalQuery =
select(itemId).from(select(itemId).from(TwoTestTable).join(JoinQuery.CrossJoin, ThreeTestTable))
- val resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ val resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe 0
}

@@ -147,7 +147,7 @@ class JoinQueryIT extends DslITSpec with TableDrivenPropertyChecks {
)
.join(AllLeftJoin, ThreeTestTable)
.on(itemId)
- val resultRows = chExecutor.execute[StringResult](query).futureValue.rows
+ val resultRows = queryExecutor.execute[StringResult](query).futureValue.rows
resultRows.length shouldBe 0
}
}
4 changes: 2 additions & 2 deletions dsl/src/it/scala/com/crobox/clickhouse/dsl/QueryIT.scala
@@ -23,7 +23,7 @@ class QueryIT extends DslITSpec {
case class Result(columnResult: String, empty: Int)
implicit val resultFormat: RootJsonFormat[Result] =
jsonFormat[String, Int, Result](Result.apply, "column_1", "empty")
- val results: Future[QueryResult[Result]] = chExecutor.execute[Result](
+ val results: Future[QueryResult[Result]] = queryExecutor.execute[Result](
select(shieldId as itemId, col1, notEmpty(col1) as "empty") from OneTestTable join (InnerJoin, TwoTestTable) using itemId
)
results.futureValue.rows.map(_.columnResult) should be(table2Entries.map(_.firstColumn))
@@ -78,7 +78,7 @@ class QueryIT extends DslITSpec {
}

def runQry(query: OperationalQuery): Future[String] = {
- val che = chExecutor.asInstanceOf[DefaultClickhouseQueryExecutor]
+ val che = queryExecutor.asInstanceOf[DefaultClickhouseQueryExecutor]
clickhouseClient.query(che.toSql(query.internalQuery))
}
}
@@ -22,10 +22,10 @@ class AggregationFunctionsIT extends DslITSpec {
def result = columnResult.toInt
}
implicit val resultFormat: RootJsonFormat[Result] = jsonFormat[String, Result](Result.apply, "result")
- val resultSimple = chExecutor
+ val resultSimple = queryExecutor
.execute[Result](select(uniq(shieldId) as "result") from OneTestTable)
.futureValue
- val resultExact = chExecutor
+ val resultExact = queryExecutor
.execute[Result](select(uniqExact(shieldId) as "result") from OneTestTable)
.futureValue
resultSimple.rows.head.result shouldBe (entries ~% delta)
@@ -36,7 +36,7 @@ class AggregationFunctionsIT extends DslITSpec {
it should "run quantiles" in {
case class Result(result: Seq[Float])
implicit val resultFormat: RootJsonFormat[Result] = jsonFormat[Seq[Float], Result](Result.apply, "result")
- val result = chExecutor
+ val result = queryExecutor
.execute[Result](
select(quantiles(col2, 0.1F, 0.2F, 0.3F, 0.4F, 0.5F, 0.99F) as ref[Seq[Float]]("result")) from TwoTestTable
)
@@ -47,7 +47,7 @@ class AggregationFunctionsIT extends DslITSpec {
it should "run for each" in {
case class Result(result: Seq[String])
implicit val resultFormat: RootJsonFormat[Result] = jsonFormat[Seq[String], Result](Result.apply, "result")
- val result = chExecutor
+ val result = queryExecutor
.execute[Result](
select(forEach[Int, TableColumn[Seq[Int]], Double](numbers) { column =>
sum(column)
@@ -61,7 +61,7 @@ class AggregationFunctionsIT extends DslITSpec {

it should "firstValue in aggregate" in {
val resultRows =
- chExecutor
+ queryExecutor
.execute[StringResult](select(firstValue(col1) as "result").from(TwoTestTable))
.futureValue
.rows
@@ -71,7 +71,7 @@ class AggregationFunctionsIT extends DslITSpec {

it should "lastValue in aggregate" in {
val resultRows =
- chExecutor
+ queryExecutor
.execute[StringResult](select(lastValue(col1) as "result").from(TwoTestTable))
.futureValue
.rows
@@ -12,7 +12,7 @@ class StringFunctionsIT extends DslITSpec {

it should "split by character" in {
val resultRows =
- chExecutor
+ queryExecutor
.execute[StringResult](select(arrayJoin(splitByChar(",", col1)) as "result") from TwoTestTable)
.futureValue
.rows
@@ -22,7 +22,7 @@ class StringFunctionsIT extends DslITSpec {

it should "split by string" in {
val resultRows =
- chExecutor
+ queryExecutor
.execute[StringResult](select(arrayJoin(splitByString("em,", col1)) as "result") from TwoTestTable)
.futureValue
.rows
@@ -32,7 +32,7 @@ class StringFunctionsIT extends DslITSpec {

it should "concatenate string back" in {
val resultRows =
- chExecutor
+ queryExecutor
.execute[StringResult](select(arrayStringConcat(splitByChar(",", col1), ",") as "result") from TwoTestTable)
.futureValue
.rows
@@ -11,7 +11,7 @@ class UUIDFunctionsIT extends DslITSpec {

it should "handle notEmpty" in {
val resultRows =
- chExecutor
+ queryExecutor
.execute[StringResult](select(shieldId as "result").from(OneTestTable).where(dsl.notEmpty(shieldId)))
.futureValue
.rows
@@ -15,46 +15,46 @@ object QueryImprovements extends LazyLogging {

def executeWithLogging[V: JsonReader](debug: Boolean)(
implicit executionContext: ExecutionContext,
- clickhouseExecutor: QueryExecutor
+ queryExecutor: QueryExecutor
): Future[QueryResult[V]] = {
if (debug)
logger.info(
s"SQL: ${tokenizer.toSql(query.internalQuery)(TokenizeContext(clickhouseExecutor.serverVersion))}"
s"SQL: ${tokenizer.toSql(query.internalQuery)(TokenizeContext(queryExecutor.serverVersion))}"
)
- clickhouseExecutor.execute(query)
+ queryExecutor.execute(query)
}

def executeWithLogging[V: JsonReader](traceId: String)(
implicit executionContext: ExecutionContext,
- clickhouseExecutor: QueryExecutor
+ queryExecutor: QueryExecutor
): Future[QueryResult[V]] = {
logger.info(
s"[$traceId] ${tokenizer.toSql(query.internalQuery)(TokenizeContext(clickhouseExecutor.serverVersion))}"
s"[$traceId] ${tokenizer.toSql(query.internalQuery)(TokenizeContext(queryExecutor.serverVersion))}"
)
- clickhouseExecutor.execute(query)
+ queryExecutor.execute(query)
}

def executeWithLogging[V: JsonReader](traceId: Option[String])(
implicit executionContext: ExecutionContext,
- clickhouseExecutor: QueryExecutor
+ queryExecutor: QueryExecutor
): Future[QueryResult[V]] = {
traceId.foreach(
id =>
logger.info(
s"[$id] ${tokenizer.toSql(query.internalQuery)(TokenizeContext(clickhouseExecutor.serverVersion))}"
s"[$id] ${tokenizer.toSql(query.internalQuery)(TokenizeContext(queryExecutor.serverVersion))}"
)
)
- clickhouseExecutor.execute(query)
+ queryExecutor.execute(query)
}

def executeWithLogging[V: JsonReader](
implicit executionContext: ExecutionContext,
- clickhouseExecutor: QueryExecutor
+ queryExecutor: QueryExecutor
): Future[QueryResult[V]] = {
logger.info(
s"SQL: ${tokenizer.toSql(query.internalQuery)(TokenizeContext(clickhouseExecutor.serverVersion))}"
s"SQL: ${tokenizer.toSql(query.internalQuery)(TokenizeContext(queryExecutor.serverVersion))}"
)
- clickhouseExecutor.execute(query)
+ queryExecutor.execute(query)
}
}
}
10 changes: 5 additions & 5 deletions dsl/src/main/scala/com/crobox/clickhouse/dsl/package.scala
@@ -1,7 +1,7 @@
package com.crobox.clickhouse

import com.crobox.clickhouse.dsl.column.ClickhouseColumnFunctions
- import com.crobox.clickhouse.dsl.execution.{ClickhouseQueryExecutor, QueryExecutor, QueryResult}
+ import com.crobox.clickhouse.dsl.execution.{QueryExecutor, QueryResult}
import com.crobox.clickhouse.dsl.marshalling.{QueryValue, QueryValueFormats}
import spray.json.{JsonReader, JsonWriter}

@@ -18,16 +18,16 @@ package object dsl extends ClickhouseColumnFunctions with QueryFactory with Quer

def execute[V: JsonReader](
implicit executionContext: ExecutionContext,
- clickhouseExecutor: QueryExecutor
- ): Future[QueryResult[V]] = clickhouseExecutor.execute(query)
+ queryExecutor: QueryExecutor
+ ): Future[QueryResult[V]] = queryExecutor.execute(query)
}

implicit class ValueInsertion[V: JsonWriter](values: Seq[V]) {

def into(table: Table)(
implicit executionContext: ExecutionContext,
- clickhouseExecutor: QueryExecutor
- ): Future[String] = clickhouseExecutor.insert(table, values)
+ queryExecutor: QueryExecutor
+ ): Future[String] = queryExecutor.insert(table, values)
}

/**
@@ -30,7 +30,7 @@ trait DslIntegrationSpec

implicit val ec: ExecutionContext

- implicit lazy val chExecutor: QueryExecutor = ClickhouseQueryExecutor.default(clickClient)
+ implicit lazy val queryExecutor: QueryExecutor = ClickhouseQueryExecutor.default(clickClient)

override def beforeAll(): Unit = {
super.beforeAll()
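
The DslIntegrationSpec hunk above is where the renamed implicit is actually bound; every other change in this commit just follows that name. A minimal usage sketch of the new binding, not part of the commit: clickClient stands in for an already-configured ClickhouseClient, and OneTestTable, shieldId and StringResult are assumed to be supplied by the test fixtures shown in the diffs above (including a JsonReader[StringResult] in scope, as the test specs provide).

import scala.concurrent.{ExecutionContext, Future}
import com.crobox.clickhouse.dsl._
import com.crobox.clickhouse.dsl.execution.{ClickhouseQueryExecutor, QueryExecutor, QueryResult}

// Hypothetical wiring: bind the executor under its new name. The DSL's extension
// methods (see the package object diff above) resolve it by type, so only the
// binding site notices the rename.
implicit val ec: ExecutionContext = ExecutionContext.global
implicit val queryExecutor: QueryExecutor = ClickhouseQueryExecutor.default(clickClient)

// execute[V] comes from the dsl package object and uses the implicit queryExecutor.
val rows: Future[QueryResult[StringResult]] =
  select(shieldId as "result").from(OneTestTable).execute[StringResult]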
