-
Notifications
You must be signed in to change notification settings - Fork 5
/
Copy pathbuild.sbt
182 lines (161 loc) · 6.08 KB
/
build.sbt
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
// BUILD SETUP
// Build-wide settings: the ThisBuild scope applies to every sub-project below.
ThisBuild / organization := "com.amadeus.dataio"
ThisBuild / versionScheme := Some("early-semver")
ThisBuild / scalaVersion := "2.12.15"
// Dependency versions shared across the sub-projects defined in this file.
val scalatestVersion = "3.2.15"
val scalamockVersion = "5.2.0"
val sparkVersion = "3.5.0"
val typesafeConfigVersion = "1.4.3"
val slf4jApiVersion = "2.0.7"
val commonsIoVersion = "2.13.0"
// RELEASE SETUP
import sbt.Keys.libraryDependencies
import sbtrelease.ReleaseStateTransformations.*
/** Computes the release version from the current development version.
  *
  * Expects `ver` in the form `X.Y.Z-spark<A.B>-SNAPSHOT`, strips the
  * `-SNAPSHOT` suffix and bumps the numeric part according to `bumpType`:
  *   - MAJOR: (X+1).0.0
  *   - MINOR: X.(Y+1).0
  *   - PATCH: X.Y.Z unchanged (the release just drops `-SNAPSHOT`)
  *
  * @param ver      current version, e.g. "1.2.3-spark3.5-SNAPSHOT"
  * @param bumpType one of "MAJOR", "MINOR", "PATCH" (from RELEASE_TYPE)
  * @return the release version, e.g. "1.3.0-spark3.5" for a MINOR bump
  *         (fails the build via sys.error on a malformed version or bump type)
  */
def getReleaseVersion(ver: String, bumpType: String): String = {
  val pattern = """(\d+)\.(\d+)\.(\d+)-(spark[\d.]+)-SNAPSHOT""".r
  ver match {
    // Renamed the captured group from `sparkVersion` to `sparkSuffix`:
    // the old name shadowed the top-level `sparkVersion` val, which is
    // confusing and triggers a shadowing warning.
    case pattern(major, minor, patch, sparkSuffix) =>
      bumpType match {
        case "MAJOR" => s"${major.toInt + 1}.0.0-$sparkSuffix"
        case "MINOR" => s"$major.${minor.toInt + 1}.0-$sparkSuffix"
        case "PATCH" => s"$major.$minor.$patch-$sparkSuffix"
        case _       => sys.error(s"Invalid RELEASE_TYPE: $bumpType")
      }
    case _ => sys.error(s"Invalid version format: $ver")
  }
}
/** Computes the next development version from a freshly released version.
  *
  * Expects `ver` in the form `X.Y.Z-spark<A.B>` (no `-SNAPSHOT`), bumps the
  * patch number and re-appends `-SNAPSHOT`.
  *
  * @param ver the just-released version, e.g. "1.3.0-spark3.5"
  * @return the next development version, e.g. "1.3.1-spark3.5-SNAPSHOT"
  *         (fails the build via sys.error on a malformed version)
  */
def getReleaseNextVersion(ver: String): String = {
  val pattern = """(\d+)\.(\d+)\.(\d+)-(spark[\d.]+)""".r
  ver match {
    // `sparkSuffix` (was `sparkVersion`) avoids shadowing the top-level
    // `sparkVersion` val — same rename as in getReleaseVersion.
    case pattern(major, minor, patch, sparkSuffix) =>
      s"$major.$minor.${patch.toInt + 1}-$sparkSuffix-SNAPSHOT"
    case _ => sys.error(s"Invalid version format: $ver")
  }
}
// Release bump type is supplied by CI through the RELEASE_TYPE env var;
// defaults to PATCH when unset.
val bumpType = sys.env.getOrElse("RELEASE_TYPE", "PATCH")
// sbt-release hooks: how to derive the release version from the current one...
releaseVersion := { getReleaseVersion(_, bumpType) }
// ...and the next development version from the released one.
releaseNextVersion := { getReleaseNextVersion }
// Full sbt-release pipeline, executed in order by the `release` command.
ThisBuild / releaseProcess := Seq[ReleaseStep](
checkSnapshotDependencies, // Ensure no SNAPSHOT dependencies exist
inquireVersions, // Ask for new version (auto-updated)
setReleaseVersion, // Set the new version
commitReleaseVersion, // Commit with updated version
tagRelease, // Tag in Git
publishArtifacts, // Publish JARs
setNextVersion, // Set the next development version
commitNextVersion, // Commit next version
pushChanges // Push everything to Git
)
// Global GitHub Packages settings
// Token-based credentials for GitHub Packages, read from the environment.
// NOTE(review): the username field is empty — GitHub Packages typically
// ignores the username when a token is supplied, but confirm this works in CI.
ThisBuild / credentials += Credentials(
"GitHub Package Registry",
"maven.pkg.github.com",
"",
sys.env.getOrElse("GITHUB_REGISTRY_TOKEN", "")
)
// Artifacts are published to the project's GitHub Packages Maven registry.
ThisBuild / publishTo := Some(
"GitHub Packages" at "https://maven.pkg.github.com/AmadeusITGroup/dataio-framework"
)
// Alternative local-repository target, kept commented for manual testing:
// ThisBuild / publishTo := Some(Resolver.file("local-maven", file(Path.userHome.absolutePath + "/.m2/repository")))
ThisBuild / publishMavenStyle := true
// Additional Maven metadata
// POM-only metadata (project URL + license) required by Maven repositories;
// sbt accepts inline XML for pomExtra.
ThisBuild / pomExtra :=
<url>https://github.com/AmadeusITGroup/dataio-framework</url>
<licenses>
<license>
<name>Apache License 2.0</name>
<url>https://github.com/AmadeusITGroup/dataio-framework/blob/main/LICENSE</url>
</license>
</licenses>
// TESTS SETUP
// Run test suites sequentially across the build.
// NOTE(review): presumably because concurrent Spark sessions interfere —
// confirm the original motivation before changing.
ThisBuild / Test / parallelExecution := false
// Never publish test artifacts for any project.
ThisBuild / Test / publishArtifact := false
// PROJECTS SETUP
lazy val commonSettings = Seq(
libraryDependencies ++= Seq(
"org.apache.spark" %% "spark-sql" % sparkVersion,
"org.apache.spark" %% "spark-core" % sparkVersion,
"com.typesafe" % "config" % typesafeConfigVersion,
"org.scalatest" %% "scalatest" % scalatestVersion % Test,
"org.scalamock" %% "scalamock" % scalamockVersion % Test
)
)
/** Shared traits and functions for testing inside Data I/O sub projects.
  * It should not be published, and only be used in the Data I/O project itself.
  * @see [[test]] For testing applications made with Data I/O.
  */
lazy val testutils = (project in file("testutils"))
  .settings(
    // Internal helper project: excluded from publishing entirely.
    publish / skip := true,
    // Same stack as commonSettings, but the test frameworks are
    // compile-scoped here so the shared helpers can expose them.
    libraryDependencies ++= Seq(
      "org.scalatest"    %% "scalatest"  % scalatestVersion,
      "org.scalamock"    %% "scalamock"  % scalamockVersion,
      "org.apache.spark" %% "spark-sql"  % sparkVersion,
      "org.apache.spark" %% "spark-core" % sparkVersion,
      "com.typesafe"      % "config"     % typesafeConfigVersion
    )
  )
/** Core module of the framework, published as "dataio-core". */
lazy val core = (project in file("core"))
  .dependsOn(testutils % Test)
  .settings(
    commonSettings,
    name := "dataio-core",
    // Logging facade and Apache Commons IO on top of the shared Spark stack.
    libraryDependencies ++= Seq(
      "org.slf4j"  % "slf4j-api"  % slf4jApiVersion,
      "commons-io" % "commons-io" % commonsIoVersion
    )
  )
/** Kafka connector module, published as "dataio-kafka". */
lazy val kafka = (project in file("kafka"))
  .settings(
    commonSettings,
    name := "dataio-kafka",
    libraryDependencies ++= {
      // Hoisted: "3.5.1" was hardcoded twice; keep the two embedded-kafka
      // artifacts in lockstep (matches the file's version-val convention).
      val embeddedKafkaVersion = "3.5.1"
      Seq(
        "org.apache.spark"        %% "spark-sql-kafka-0-10"   % sparkVersion,
        "io.github.embeddedkafka" %% "embedded-kafka"         % embeddedKafkaVersion % Test,
        "io.github.embeddedkafka" %% "embedded-kafka-streams" % embeddedKafkaVersion % Test
      )
    }
  )
  .dependsOn(core, testutils % Test)
/** Snowflake connector module, published as "dataio-snowflake". */
lazy val snowflake = (project in file("snowflake"))
  .settings(
    commonSettings,
    name := "dataio-snowflake",
    libraryDependencies ++= Seq(
      // Was f"3.1.1": the f-interpolator on a literal with no placeholders
      // is a no-op — use a plain string like every other version in this file.
      "net.snowflake" %% "spark-snowflake" % "3.1.1"
    )
  )
  .dependsOn(core, testutils % Test)
/** Elasticsearch connector module, published as "dataio-elasticsearch".
  * The ES Spark connector ships its own Scala/Spark/slf4j dependencies,
  * which are excluded here in favour of the versions this build provides.
  */
lazy val elasticsearch = (project in file("elasticsearch"))
  .settings(
    commonSettings,
    name := "dataio-elasticsearch",
    libraryDependencies ++= {
      // Use sbt's scalaBinaryVersion instead of scalaVersion.value.substring(0, 4):
      // substring only works when the binary suffix is exactly 4 characters
      // (e.g. Scala 3 "3.3.1" would wrongly yield "3.3.").
      val scalaBinV = scalaBinaryVersion.value
      Seq(
        "org.elasticsearch" %% "elasticsearch-spark-30" % "8.17.4"
          exclude ("org.scala-lang", "scala-library")
          exclude ("org.scala-lang", "scala-reflect")
          exclude ("org.slf4j", "slf4j-api")
          exclude ("org.apache.spark", s"spark-core_$scalaBinV")
          exclude ("org.apache.spark", s"spark-sql_$scalaBinV")
          exclude ("org.apache.spark", s"spark-catalyst_$scalaBinV")
          exclude ("org.apache.spark", s"spark-streaming_$scalaBinV")
      )
    }
  )
  .dependsOn(core, testutils % Test)
/** Public testing toolkit for applications built with Data I/O, published as
  * "dataio-test" (unlike the internal [[testutils]] project, which is not
  * published at all).
  */
lazy val test = (project in file("test"))
  .dependsOn(core, testutils % Test)
  .settings(
    commonSettings,
    name := "dataio-test",
    // Test frameworks are compile-scoped on purpose: this artifact is
    // consumed by downstream projects' test suites.
    libraryDependencies ++= Seq(
      "org.scalatest" %% "scalatest" % scalatestVersion,
      "org.scalamock" %% "scalamock" % scalamockVersion
    )
  )
// Projects configuration
/** Umbrella project: never published itself, it only aggregates the
  * sub-projects so root-level commands (compile / test / publish / release)
  * fan out to all of them.
  */
lazy val root = (project in file("."))
  .settings(
    name := "dataio",
    publish / skip := true
  )
  // Previously only core, test and kafka were aggregated, so root-level
  // commands silently skipped snowflake, elasticsearch and testutils.
  // testutils keeps publish/skip := true, so aggregating it is safe.
  .aggregate(core, test, kafka, snowflake, elasticsearch, testutils)