Merged
Changes from 9 commits
16 changes: 12 additions & 4 deletions .github/workflows/build-and-test.yml
@@ -22,8 +22,15 @@ jobs:
fail-fast: false
matrix:
java: [ 8, 17 ]
scala: [ 2.12, 2.13 ]
spark: [ 3.3, 3.4, 3.5 ]
scala: [ '2.12', '2.13' ]
spark: [ '3.3', '3.4', '3.5', '4.0' ]
exclude:
# Spark 4.0 only supports Scala 2.13
- spark: '4.0'
scala: '2.12'
# Spark 4.0 requires Java 11+
- spark: '4.0'
java: 8
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
@@ -51,14 +58,15 @@ jobs:
fail-fast: false
matrix:
clickhouse: [ 25.3, 25.6, 25.7, latest ]
java: [ 17 ]
env:
CLICKHOUSE_IMAGE: clickhouse/clickhouse-server:${{ matrix.clickhouse }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 8
java-version: ${{ matrix.java }}
cache: gradle
- run: >-
./gradlew clean test --no-daemon --refresh-dependencies
@@ -67,7 +75,7 @@
if: failure()
uses: actions/upload-artifact@v4
with:
name: log-clickhouse-${{ matrix.clickhouse }}
name: log-clickhouse-${{ matrix.clickhouse }}-java-${{ matrix.java }}
path: |
**/build/unit-tests.log
log/**
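
Two notes on the matrix changes above, reasoned from the diff itself rather than from CI output: quoting the scala and spark entries keeps a value like 4.0 from being parsed as a YAML number and rendered as "4" in expressions such as ${{ matrix.spark }}; and once the two exclude rules apply, Spark 3.3-3.5 still fan out across Scala 2.12/2.13 and Java 8/17, while Spark 4.0 runs only as scala 2.13 / java 17.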
13 changes: 11 additions & 2 deletions .github/workflows/check-license.yml
@@ -29,13 +29,22 @@ jobs:
strategy:
fail-fast: false
matrix:
spark: [ 3.3, 3.4, 3.5 ]
spark: [ "3.3", "3.4", "3.5", "4.0" ]
include:
- spark: "3.3"
java: 8
- spark: "3.4"
java: 8
- spark: "3.5"
java: 8
- spark: "4.0"
java: 17
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 8
java-version: ${{ matrix.java }}
- run: >-
./gradlew rat --no-daemon
-Dspark_binary_version=${{ matrix.spark }}
16 changes: 12 additions & 4 deletions .github/workflows/cloud.yml
@@ -34,8 +34,16 @@ jobs:
max-parallel: 1
fail-fast: false
matrix:
spark: [ 3.3, 3.4, 3.5 ]
scala: [ 2.12, 2.13 ]
spark: [ '3.3', '3.4', '3.5', '4.0' ]
scala: [ '2.12', '2.13' ]
java: [ 8, 17 ]
exclude:
# Spark 4.0 only supports Scala 2.13
- spark: '4.0'
scala: '2.12'
# Spark 4.0 requires Java 11+
- spark: '4.0'
java: 8
env:
CLICKHOUSE_CLOUD_HOST: ${{ secrets.INTEGRATIONS_TEAM_TESTS_CLOUD_HOST_SMT }}
CLICKHOUSE_CLOUD_PASSWORD: ${{ secrets.INTEGRATIONS_TEAM_TESTS_CLOUD_PASSWORD_SMT }}
@@ -44,7 +52,7 @@
- uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 8
java-version: ${{ matrix.java }}
cache: gradle
- name: Wake up ClickHouse Cloud instance
env:
@@ -80,7 +88,7 @@
if: failure()
uses: actions/upload-artifact@v4
with:
name: log-clickhouse-cloud-spark-${{ matrix.spark }}-scala-${{ matrix.scala }}
name: log-clickhouse-cloud-spark-${{ matrix.spark }}-scala-${{ matrix.scala }}-java-${{ matrix.java }}
path: |
**/build/unit-tests.log
log/**
13 changes: 11 additions & 2 deletions .github/workflows/style.yml
@@ -30,13 +30,22 @@ jobs:
strategy:
fail-fast: false
matrix:
spark: [ 3.3, 3.4, 3.5 ]
spark: [ "3.3", "3.4", "3.5", "4.0" ]
include:
- spark: "3.3"
java: 8
- spark: "3.4"
java: 8
- spark: "3.5"
java: 8
- spark: "4.0"
java: 17
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 8
java-version: ${{ matrix.java }}
cache: gradle
- run: >-
./gradlew spotlessCheck --no-daemon --refresh-dependencies
16 changes: 12 additions & 4 deletions .github/workflows/tpcds.yml
@@ -30,14 +30,22 @@ jobs:
strategy:
fail-fast: false
matrix:
spark: [ 3.3, 3.4, 3.5 ]
scala: [ 2.12, 2.13 ]
spark: [ '3.3', '3.4', '3.5', '4.0' ]
scala: [ '2.12', '2.13' ]
java: [ 8, 17 ]
exclude:
# Spark 4.0 only supports Scala 2.13
- spark: '4.0'
scala: '2.12'
# Spark 4.0 requires Java 11+
Collaborator (suggested change): "# Spark 4.0 requires Java 11+" -> "# Spark 4.0 requires Java 17+"

- spark: '4.0'
java: 8
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 8
java-version: ${{ matrix.java }}
cache: gradle
- run: >-
./gradlew clean slowTest --no-daemon --refresh-dependencies
@@ -48,7 +56,7 @@
if: failure()
uses: actions/upload-artifact@v4
with:
name: log-tpcds-spark-${{ matrix.spark }}-scala-${{ matrix.scala }}
name: log-tpcds-spark-${{ matrix.spark }}-scala-${{ matrix.scala }}-java-${{ matrix.java }}
path: |
**/build/unit-tests.log
log/**
18 changes: 12 additions & 6 deletions build.gradle
@@ -106,7 +106,11 @@ allprojects {
subprojects {
apply plugin: "scala"
apply plugin: "java-library"
apply plugin: "org.scoverage"
// Disable scoverage when running Metals' bloopInstall to avoid plugin resolution issues
def isBloopInstall = gradle.startParameter.taskNames.any { it.contains('bloopInstall') }
Collaborator: Did you check that this does not affect IntelliJ?

Collaborator (Author): It shouldn't: IntelliJ uses its own build server and doesn't use Bloop at all.

if (!project.hasProperty('disableScoverage') && !isBloopInstall) {
apply plugin: "org.scoverage"
}
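A usage note based on the property check above (disableScoverage is exactly the flag read by hasProperty; nothing else is assumed): besides the automatic bloopInstall detection, scoverage can be skipped explicitly for a single invocation, e.g. ./gradlew test -PdisableScoverage.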
apply plugin: "com.diffplug.spotless"
apply plugin: "com.github.maiflai.scalatest"

@@ -168,11 +172,13 @@ subprojects {
}
}

scoverage {
scoverageVersion = "2.0.11"
reportDir.set(file("${rootProject.buildDir}/reports/scoverage"))
highlighting.set(false)
minimumRate.set(0.0)
if (plugins.hasPlugin('org.scoverage')) {
scoverage {
scoverageVersion = "2.0.11"
reportDir.set(file("${rootProject.buildDir}/reports/scoverage"))
highlighting.set(false)
minimumRate.set(0.0)
}
}

spotless {
ClickHouseSingleMixIn.scala
@@ -17,13 +17,18 @@ package com.clickhouse.spark.base
import com.clickhouse.spark.Utils
import com.clickhouse.data.ClickHouseVersion
import com.dimafeng.testcontainers.{ForAllTestContainer, JdbcDatabaseContainer, SingleContainer}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite
import org.slf4j.LoggerFactory
import org.testcontainers.containers.ClickHouseContainer
import org.testcontainers.utility.{DockerImageName, MountableFile}
import java.nio.file.{Path, Paths}
import scala.collection.JavaConverters._

trait ClickHouseSingleMixIn extends AnyFunSuite with ForAllTestContainer with ClickHouseProvider {
trait ClickHouseSingleMixIn extends AnyFunSuite with BeforeAndAfterAll with ForAllTestContainer
with ClickHouseProvider {

private val logger = LoggerFactory.getLogger(getClass)
// format: off
private val CLICKHOUSE_IMAGE: String = Utils.load("CLICKHOUSE_IMAGE", "clickhouse/clickhouse-server:23.8")
private val CLICKHOUSE_USER: String = Utils.load("CLICKHOUSE_USER", "default")
@@ -34,6 +39,8 @@ trait ClickHouseSingleMixIn extends AnyFunSuite with ForAllTestContainer with Cl
private val CLICKHOUSE_TPC_PORT = 9000
// format: on

logger.info(s"Initializing with ClickHouse image: $CLICKHOUSE_IMAGE")

override val clickhouseVersion: ClickHouseVersion = ClickHouseVersion.of(CLICKHOUSE_IMAGE.split(":").last)

protected val rootProjectDir: Path = {
@@ -80,4 +87,20 @@ trait ClickHouseSingleMixIn extends AnyFunSuite with ForAllTestContainer with Cl
override def clickhousePassword: String = CLICKHOUSE_PASSWORD
override def clickhouseDatabase: String = CLICKHOUSE_DB
override def isSslEnabled: Boolean = false

override def beforeAll(): Unit = {
val startTime = System.currentTimeMillis()
logger.info(s"Starting ClickHouse container: $CLICKHOUSE_IMAGE")
super.beforeAll() // This starts the container and makes mappedPort available
val duration = System.currentTimeMillis() - startTime
logger.info(
s"ClickHouse container started in ${duration}ms at ${container.host}:${container.mappedPort(CLICKHOUSE_HTTP_PORT)}"
)
}

override def afterAll(): Unit = {
logger.info("Stopping ClickHouse container")
super.afterAll()
logger.info("ClickHouse container stopped")
}
}
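
For context, a minimal usage sketch of the updated mix-in (hypothetical suite and test names; only ClickHouseSingleMixIn, clickhouseDatabase, and isSslEnabled come from the diff above, and it assumes the trait fills in every ClickHouseProvider member, as its overrides suggest):

import com.clickhouse.spark.base.ClickHouseSingleMixIn

// Hypothetical suite, not part of this PR. Mixing in ClickHouseSingleMixIn means
// beforeAll starts the ClickHouse container (and logs the startup duration) before
// any test runs, and afterAll stops it once the suite finishes.
class ClickHouseSmokeSuite extends ClickHouseSingleMixIn {
  test("provider values are available once the container is up") {
    assert(clickhouseDatabase.nonEmpty)
    assert(!isSslEnabled)
  }
}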
12 changes: 9 additions & 3 deletions gradle.properties
@@ -16,10 +16,10 @@ mavenCentralMirror=https://repo1.maven.org/maven2/
mavenSnapshotsRepo=https://central.sonatype.com/repository/maven-snapshots/
mavenReleasesRepo=https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/

systemProp.scala_binary_version=2.12
systemProp.scala_binary_version=2.13
systemProp.known_scala_binary_versions=2.12,2.13
systemProp.spark_binary_version=3.5
systemProp.known_spark_binary_versions=3.3,3.4,3.5
systemProp.spark_binary_version=4.0
systemProp.known_spark_binary_versions=3.3,3.4,3.5,4.0
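
A command-line sketch tied to these defaults (the property names come from this file and from the workflow steps above; the chosen versions are only an example): the build now targets Spark 4.0 with Scala 2.13 by default, and an older combination can still be selected per invocation, e.g. ./gradlew build -Dspark_binary_version=3.5 -Dscala_binary_version=2.12.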

group=com.clickhouse.spark

@@ -29,26 +29,32 @@ clickhouse_client_v2_version=0.9.4
spark_33_version=3.3.4
spark_34_version=3.4.2
spark_35_version=3.5.1
spark_40_version=4.0.1

spark_33_scala_212_version=2.12.15
spark_34_scala_212_version=2.12.17
spark_35_scala_212_version=2.12.18
spark_40_scala_212_version=2.12.18
Collaborator: Spark 4.0 does not support Scala 2.12.

Collaborator (Author): Good catch.

spark_33_scala_213_version=2.13.8
spark_34_scala_213_version=2.13.8
spark_35_scala_213_version=2.13.8
spark_40_scala_213_version=2.13.8

spark_33_antlr_version=4.8
spark_34_antlr_version=4.9.3
spark_35_antlr_version=4.9.3
spark_40_antlr_version=4.13.1

spark_33_jackson_version=2.13.4
spark_34_jackson_version=2.14.2
spark_35_jackson_version=2.15.2
spark_40_jackson_version=2.17.0

spark_33_slf4j_version=1.7.32
spark_34_slf4j_version=2.0.6
spark_35_slf4j_version=2.0.7
spark_40_slf4j_version=2.0.7

# Align with Apache Spark, and don't bundle them in release jar.
commons_lang3_version=3.12.0
5 changes: 5 additions & 0 deletions settings.gradle
@@ -42,3 +42,8 @@ project(":clickhouse-spark-runtime-${spark_binary_version}_$scala_binary_version"
include ":clickhouse-spark-it-${spark_binary_version}_$scala_binary_version"
project(":clickhouse-spark-it-${spark_binary_version}_$scala_binary_version").projectDir = file("spark-${spark_binary_version}/clickhouse-spark-it")
project(":clickhouse-spark-it-${spark_binary_version}_$scala_binary_version").name = "clickhouse-spark-it-${spark_binary_version}_$scala_binary_version"

// Examples module for running/debugging sample apps in IDE
include ":clickhouse-examples-${spark_binary_version}_$scala_binary_version"
project(":clickhouse-examples-${spark_binary_version}_$scala_binary_version").projectDir = file("spark-${spark_binary_version}/examples")
project(":clickhouse-examples-${spark_binary_version}_$scala_binary_version").name = "clickhouse-examples-${spark_binary_version}_$scala_binary_version"