Add Spark 3.4 and 3.5 support #365

Merged
merged 5 commits into from Dec 27, 2023
5 changes: 4 additions & 1 deletion CHANEGELOG.md
@@ -1,3 +1,7 @@
## 1.0.4

* Add support for Spark 3.4.x and Spark 3.5.x

## 1.0.2

* Add support for Spark 3.3.x, remove support for Spark 3.0.x
@@ -15,4 +19,3 @@
### Breaking changes
* Protobuf maps are now represented as Spark maps. Prior to this change,
maps were represented as a list of key-value structs. (#79)
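
For readers of the changelog above: the 1.0.4 release adds Spark 3.4.x/3.5.x modules built by the matrix in build.sbt below. As a hedged sketch only (the exact artifact id follows the `name` setting further down, and the `scalapb0_11` suffix and group id are assumptions not stated in this diff), a downstream build targeting Spark 3.5 and ScalaPB 0.11 might depend on it like this:

    // downstream build.sbt (hypothetical coordinates; verify against the published artifacts)
    libraryDependencies += "com.thesamet.scalapb" %% "sparksql35-scalapb0_11" % "1.0.4"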

34 changes: 29 additions & 5 deletions build.sbt
@@ -12,6 +12,10 @@ val Scala212 = "2.12.18"

val Scala213 = "2.13.12"

lazy val Spark35 = Spark("3.5.0")

lazy val Spark34 = Spark("3.4.2")

lazy val Spark33 = Spark("3.3.3")

lazy val Spark32 = Spark("3.2.3")
@@ -24,6 +28,8 @@ lazy val ScalaPB0_10 = ScalaPB("0.10.11")

lazy val framelessDatasetName = settingKey[String]("frameless-dataset-name")

lazy val framelessDatasetVersion = settingKey[String]("frameless-dataset-version")

lazy val spark = settingKey[Spark]("spark")

lazy val scalapb = settingKey[ScalaPB]("scalapb")
@@ -44,7 +50,7 @@ lazy val `sparksql-scalapb` = (projectMatrix in file("sparksql-scalapb"))
.defaultAxes()
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %% framelessDatasetName.value % "0.14.0",
"org.typelevel" %% framelessDatasetName.value % framelessDatasetVersion.value,
"com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.value.scalapbVersion,
"com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.value.scalapbVersion % "protobuf",
"org.apache.spark" %% "spark-sql" % spark.value.sparkVersion % "provided",
@@ -77,10 +83,18 @@ lazy val `sparksql-scalapb` = (projectMatrix in file("sparksql-scalapb"))
},
framelessDatasetName := {
spark.value match {
case Spark33 => "frameless-dataset"
case Spark32 => "frameless-dataset-spark32"
case Spark31 => "frameless-dataset-spark31"
case _ => ???
case Spark35 | Spark34 | Spark33 => "frameless-dataset"
case Spark32 => "frameless-dataset-spark32"
case Spark31 => "frameless-dataset-spark31"
case _ => ???
}
},
framelessDatasetVersion := {
spark.value match {
case Spark35 | Spark34 | Spark33 => "0.16.0" // NPE in 3.4, 3.5 if older lib versions used
case Spark32 => "0.15.0" // Spark3.2 support dropped in ver > 0.15.0
case Spark31 => "0.14.0" // Spark3.1 support dropped in ver > 0.14.0
case _ => ???
}
},
name := s"sparksql${spark.value.majorVersion}${spark.value.minorVersion}-${scalapb.value.idSuffix}",
@@ -93,6 +107,16 @@ lazy val `sparksql-scalapb` = (projectMatrix in file("sparksql-scalapb"))
Test / run / fork := true,
Test / javaOptions ++= Seq("-Xmx2G")
)
.customRow(
scalaVersions = Seq(Scala212, Scala213),
axisValues = Seq(Spark35, ScalaPB0_11, VirtualAxis.jvm),
settings = Seq()
)
.customRow(
scalaVersions = Seq(Scala212, Scala213),
axisValues = Seq(Spark34, ScalaPB0_11, VirtualAxis.jvm),
settings = Seq()
)
.customRow(
scalaVersions = Seq(Scala212, Scala213),
axisValues = Seq(Spark33, ScalaPB0_11, VirtualAxis.jvm),
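
The `Spark(...)` values and the `customRow` entries above rely on sbt-projectmatrix virtual axes, but the `Spark` axis type itself is outside this diff. A minimal sketch of how such an axis is typically defined (file name, field derivations, and suffix format are assumptions; only `sparkVersion`, `majorVersion`, `minorVersion`, and the use of `VirtualAxis` are implied by the build above):

    // project/SparkAxis.scala (hypothetical location)
    import sbt.VirtualAxis

    case class Spark(sparkVersion: String) extends VirtualAxis.WeakAxis {
      // "3.5.0" -> major "3", minor "5"
      val majorVersion: String = sparkVersion.split('.')(0)
      val minorVersion: String = sparkVersion.split('.')(1)
      // suffixes distinguish the generated subproject ids and directories per Spark version
      override def idSuffix: String = s"spark$majorVersion$minorVersion"
      override def directorySuffix: String = s"spark$majorVersion$minorVersion"
    }

Each customRow(scalaVersions = ..., axisValues = Seq(Spark35, ScalaPB0_11, VirtualAxis.jvm), ...) then materializes one subproject per listed Scala version for that Spark/ScalaPB combination, which is how the two new rows add the Spark 3.4 and 3.5 builds without duplicating settings.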