Feature/array col magnet #146

Merged
merged 4 commits on Jul 17, 2023
@@ -55,7 +55,7 @@ class HostBalancerTest extends ClickhouseClientSpec {
case ClusterAwareHostBalancer(host, cluster, _, builtTimeout) =>
host shouldEqual ClickhouseHostBuilder.toHost("localhost", Some(8123))
cluster shouldBe "cluster"
- builtTimeout shouldBe (1 second)
+ builtTimeout shouldBe 1.second
}
}

@@ -13,7 +13,7 @@ class ClusterConnectionFlowTest extends ClickhouseClientAsyncSpec {
private val clickhouseUri: Uri = ClickhouseHostBuilder.toHost("localhost", Some(8123))
it should "select cluster hosts" in {
val (_, futureResult) = ClusterConnectionFlow
- .clusterConnectionsFlow(Future.successful(clickhouseUri), 2 seconds, "test_shard_localhost")
+ .clusterConnectionsFlow(Future.successful(clickhouseUri), 2.seconds, "test_shard_localhost")
.toMat(Sink.head)(Keep.both)
.run()
futureResult.map(result => {
@@ -23,7 +23,7 @@ class ClusterConnectionFlowTest extends ClickhouseClientAsyncSpec {

it should "fail for non existing cluster" in {
val (_, futureResult) = ClusterConnectionFlow
- .clusterConnectionsFlow(Future.successful(clickhouseUri), 2 seconds, "cluster")
+ .clusterConnectionsFlow(Future.successful(clickhouseUri), 2.seconds, "cluster")
.toMat(Sink.head)(Keep.both)
.run()
futureResult
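A note on the duration changes in the two test files above: `1 second` and `2 seconds` rely on Scala's postfix operator syntax, which emits a feature warning (and fails under `-Xfatal-warnings`) unless `scala.language.postfixOps` is enabled, whereas `1.second` and `2.seconds` are ordinary method calls on the same `scala.concurrent.duration` wrappers. A minimal sketch of the two spellings:

```scala
import scala.concurrent.duration._

object DurationSyntaxSketch {
  // Plain method selection on the DurationInt wrapper; no extra language import needed.
  val dotted: FiniteDuration = 1.second

  // The old spelling is postfix-operator syntax and needs the feature enabled:
  // import scala.language.postfixOps
  // val postfix: FiniteDuration = 1 second
}
```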
14 changes: 4 additions & 10 deletions dsl/src/main/scala/com.crobox.clickhouse/dsl/column/Magnets.scala
@@ -44,8 +44,6 @@ trait Magnets {
override val column: TableColumn[T] = Const(s)
}

- sealed trait TupleColMagnet extends Magnet[Nothing]
-
/**
* Represents any accepted type for the right hand argument of the IN operators (tuple, table or Qry)
*/
@@ -88,18 +86,14 @@ trait Magnets {
*/
sealed trait ArrayColMagnet[+C] extends Magnet[C]

- implicit def arrayColMagnetFromIterable[T: QueryValue](s: Iterable[T]): ArrayColMagnet[Iterable[T]] =
- new ArrayColMagnet[Iterable[T]] {
-
+ implicit def arrayColMagnetFromIterableConst[T: QueryValue](s: scala.Iterable[T]): ArrayColMagnet[scala.Iterable[T]] =
+ new ArrayColMagnet[scala.Iterable[T]] {
val qvForIterable = QueryValueFormats.queryValueToSeq(implicitly[QueryValue[T]])

override val column = Const(s)(qvForIterable)
}

- implicit def arrayColMagnetFromIterableCol[Elem, Collection[B] <: Iterable[B], ColType[A] <: TableColumn[A]](
- s: ColType[Collection[Elem]]
- ): ArrayColMagnet[Collection[Elem]] =
- new ArrayColMagnet[Collection[Elem]] {
+ implicit def arrayColMagnetFromIterableCol[C](s: TableColumn[scala.Iterable[C]]): ArrayColMagnet[scala.Iterable[C]] =
+ new ArrayColMagnet[scala.Iterable[C]] {
override val column = s
}

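A usage sketch (not part of the diff) of what the two reworked implicits accept, assuming the array functions and the `NativeColumn`/`ColumnType` builders are exposed through `com.crobox.clickhouse.dsl` the way the tests in this PR use them; the column name and its ClickHouse type are made up for illustration. `arrayColMagnetFromIterableConst` lifts a plain Scala collection whose element type has a `QueryValue`, while the simplified `arrayColMagnetFromIterableCol` lifts a `TableColumn` of an `Iterable` directly, replacing the old higher-kinded `Collection[B]`/`ColType[A]` signature:

```scala
import com.crobox.clickhouse.dsl._
import com.crobox.clickhouse.dsl.schemabuilder.ColumnType

object ArrayColMagnetSketch {
  // Hypothetical Array(UInt32) column; typed as Iterable[Long] to line up with the
  // TableColumn[scala.Iterable[C]] parameter of the new implicit.
  val favoriteNumbers = NativeColumn[Iterable[Long]]("favorite_numbers", ColumnType.Array(ColumnType.UInt32))

  // Constant collections are lifted by arrayColMagnetFromIterableConst (via QueryValue[Long]).
  val fromConstants = arrayConcat(Seq(1L, 2L), Seq(3L))

  // A column of an array type is lifted by arrayColMagnetFromIterableCol, so it mixes
  // freely with constants in the same call, with no higher-kinded type parameters.
  val mixed = arrayConcat(favoriteNumbers, Seq(4L, 5L))
}
```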
8 changes: 4 additions & 4 deletions dsl/src/test/scala/com/crobox/clickhouse/dsl/QueryTest.scala
@@ -1,8 +1,8 @@
package com.crobox.clickhouse.dsl

+ import com.crobox.clickhouse._
import com.crobox.clickhouse.dsl.JoinQuery.InnerJoin
import com.crobox.clickhouse.dsl.schemabuilder.ColumnType
- import com.crobox.clickhouse._
import org.joda.time.{DateTime, LocalDate}

import java.util.UUID
@@ -61,7 +61,7 @@ class QueryTest extends DslTestSpec {
s"SELECT item_id FROM $database.captainAmerica WHERE column_2 >= 2 FORMAT JSON"
)
}

it should "compose indexOf and arrayElement" in {

def lookupNestedValue(column: NativeColumn[_], elm: String): ExpressionColumn[String] =
@@ -186,7 +186,7 @@ class QueryTest extends DslTestSpec {
}

it should "select from using ALIAS and final" in {
var query = select(shieldId as itemId, col1, notEmpty(col1) as "empty") from OneTestTable as "3sf" asFinal
var query = select(shieldId as itemId, col1, notEmpty(col1) as "empty").from(OneTestTable).as("3sf").asFinal

toSql(query.internalQuery) should matchSQL(
s"""
@@ -195,7 +195,7 @@ class QueryTest extends DslTestSpec {
|FORMAT JSON""".stripMargin
)

query = select(shieldId as itemId, col1, notEmpty(col1) as "empty") from OneTestTable as "3sf"
query = select(shieldId as itemId, col1, notEmpty(col1) as "empty").from(OneTestTable).as("3sf")

toSql(query.internalQuery) should matchSQL(
s"""
@@ -9,7 +9,7 @@ class ArrayFunctionsTest extends DslTestSpec {

it should "arrayFunction: array" in {
toSQL(select(Array())) should be("SELECT []")
- toSQL(select(Array(1,2))) should be("SELECT [1, 2]")
+ toSQL(select(Array(1, 2))) should be("SELECT [1, 2]")
}

it should "arrayFunction: arrayConcat" in {
@@ -52,4 +52,9 @@ class ArrayFunctionsTest extends DslTestSpec {
it should "arrayFunction: join" in {
toSQL(select(arrayJoin(Array(shieldId, itemId)))) should be(s"SELECT arrayJoin([shield_id, item_id])")
}

it should "arrayFunction: join with concat" in {
val col = arrayConcat(Array(shieldId, itemId), Array[String]())
toSQL(select(arrayJoin(col))) should be(s"SELECT arrayJoin(arrayConcat([shield_id, item_id], []))")
}
}
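The new test above is the case that motivates the Magnets.scala change: `arrayConcat` yields a column of an array, and the simplified `arrayColMagnetFromIterableCol` lifts that result straight into `arrayJoin`. A sketch of the same pattern written outside the assertion, assuming the spec's shared columns (`shieldId`, `itemId`) and `select` are in scope as in the file above:

```scala
// Inside a DslTestSpec-style test, as above; nothing here is new API.
val concatenated = arrayConcat(Array(shieldId, itemId), Array[String]())
val query        = select(arrayJoin(concatenated))
// Per the expectation in the new test, this renders as:
//   SELECT arrayJoin(arrayConcat([shield_id, item_id], []))
```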
2 changes: 1 addition & 1 deletion project/build.properties
@@ -1 +1 @@
- sbt.version=1.6.2
+ sbt.version=1.8.3