Skip to content

Commit

Permalink
[HOTFIX] Remove caching ccache files (oap-project#358)
Browse files Browse the repository at this point in the history
Remove caching of ccache files
  • Loading branch information
zzcclp authored Sep 7, 2022
1 parent 8950526 commit 033f34c
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 38 deletions.
29 changes: 15 additions & 14 deletions .github/workflows/unittests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -140,20 +140,21 @@ jobs:
- run: free
- run: sudo apt-get update
- run: sudo apt-get install -y maven
- name: ccache cache files
uses: actions/cache@v2
with:
path: ~/.ccache
key: ${{ github.job }}-ccache
restore-keys: |
${{ github.job }}-ccache
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
# no need to cache these as it's running on a self-hosted server
#- name: ccache cache files
# uses: actions/cache@v2
# with:
# path: ~/.ccache
# key: ${{ github.job }}-ccache
# restore-keys: |
# ${{ github.job }}-ccache
#- name: Cache local Maven repository
# uses: actions/cache@v2
# with:
# path: ~/.m2/repository
# key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
# restore-keys: |
# ${{ runner.os }}-maven-
- name: Check ClickHouse Backend lib
run: |
ls -alt /usr/local/clickhouse/lib/libch.so
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ object DSV2BenchmarkTest extends AdaptiveSparkPlanHelper {
.config("spark.memory.offHeap.size", "21474836480")
.config("spark.shuffle.sort.bypassMergeThreshold", "200")
.config("spark.local.dir", sparkLocalDir)
.config("spark.executor.heartbeatInterval", "240s")
.config("spark.executor.heartbeatInterval", "30s")
.config("spark.network.timeout", "300s")
.config("spark.sql.optimizer.dynamicPartitionPruning.enabled", "true")
.config("spark.sql.optimizer.dynamicPartitionPruning.useStats", "true")
Expand Down Expand Up @@ -283,27 +283,7 @@ object DSV2BenchmarkTest extends AdaptiveSparkPlanHelper {
val startTime = System.nanoTime()
val df = spark.sql(
s"""
|SELECT
| l_returnflag,
| l_linestatus,
| sum(l_quantity) AS sum_qty,
| sum(l_extendedprice) AS sum_base_price,
| sum(l_extendedprice * (1 - l_discount)) AS sum_disc_price,
| sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) AS sum_charge,
| avg(l_quantity) AS avg_qty,
| avg(l_extendedprice) AS avg_price,
| avg(l_discount) AS avg_disc,
| count(*) AS count_order
|FROM
| ch_lineitem
|WHERE
| l_shipdate <= date'1998-09-02' - interval 1 day
|GROUP BY
| l_returnflag,
| l_linestatus
|ORDER BY
| l_returnflag,
| l_linestatus;
|select count(l_orderkey) from ch_lineitem where l_shipdate = date'1994-01-01'
|""".stripMargin) // .show(30, false)
// df.queryExecution.debug.codegen
// df.explain(false)
Expand Down Expand Up @@ -401,7 +381,7 @@ object DSV2BenchmarkTest extends AdaptiveSparkPlanHelper {
val tookTimeArr = ArrayBuffer[Long]()
val executedCnt = 1
val executeExplain = false
val sqlFilePath = "/data2/tpch-queries-spark/"
val sqlFilePath = "/data2/tpch-queries-spark100-nohint/"
for (i <- 1 to 22) {
if (i != 21) {
val sqlNum = "q" + "%02d".format(i)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ object DSV2TPCDSBenchmarkTest extends AdaptiveSparkPlanHelper {
.config("spark.memory.offHeap.size", "21474836480")
.config("spark.shuffle.sort.bypassMergeThreshold", "200")
.config("spark.local.dir", sparkLocalDir)
.config("spark.executor.heartbeatInterval", "240s")
.config("spark.executor.heartbeatInterval", "30s")
.config("spark.network.timeout", "300s")
.config("spark.sql.optimizer.dynamicPartitionPruning.enabled", "true")
.config("spark.sql.optimizer.dynamicPartitionPruning.useStats", "true")
Expand Down

0 comments on commit 033f34c

Please sign in to comment.