Skip to content

Commit ea1e09b

Browse files
authored
#401 - fixed executablesFolder for shell scripts. (#413)
1 parent c4691dc commit ea1e09b

File tree

5 files changed

+20
-9
lines changed

5 files changed

+20
-9
lines changed

src/main/resources/application.properties

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,6 @@ scheduler.jobs.parallel.number=100
5050
scheduler.heart.beat=5000
5151
scheduler.lag.threshold=20000
5252
scheduler.sensors.changedSensorsChunkQuerySize=100
53-
scheduler.executors.executablesFolder=/
5453

5554
#Kafka sensor properties.
5655
kafkaSource.group.id.prefix=hyper_drive
@@ -70,6 +69,9 @@ kafkaSource.properties.sasl.kerberos.service.name=
7069
kafkaSource.properties.sasl.mechanism=
7170
kafkaSource.properties.sasl.jaas.config=
7271

72+
#Shell executor properties.
73+
shellExecutor.executablesFolder=/
74+
7375
#Spark yarn sink properties. Properties used to deploy and run Spark job in Yarn.
7476
sparkYarnSink.hadoopResourceManagerUrlBase=
7577
sparkYarnSink.hadoopConfDir=
@@ -88,6 +90,7 @@ sparkYarnSink.additionalConfs.spark.yarn.keytab=
8890
sparkYarnSink.additionalConfs.spark.yarn.principal=
8991
sparkYarnSink.additionalConfs.spark.shuffle.service.enabled=true
9092
sparkYarnSink.additionalConfs.spark.dynamicAllocation.enabled=true
93+
sparkYarnSink.executablesFolder=/
9194

9295
#Postgresql properties for connection to trigger metastore
9396
db.driver=net.bull.javamelody.JdbcDriver

src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/shell/ShellParameters.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,14 +18,14 @@ package za.co.absa.hyperdrive.trigger.scheduler.executors.shell
1818
import java.nio.file.Paths
1919

2020
import za.co.absa.hyperdrive.trigger.models.JobParameters
21-
import za.co.absa.hyperdrive.trigger.scheduler.utilities.ExecutorsConfig
21+
import za.co.absa.hyperdrive.trigger.scheduler.utilities.ShellExecutorConfig
2222

2323
case class ShellParameters(
2424
scriptLocation: String
2525
)
2626

2727
object ShellParameters {
2828
def apply(jobParameters: JobParameters): ShellParameters = new ShellParameters(
29-
scriptLocation = Paths.get(ExecutorsConfig.getExecutablesFolder, jobParameters.variables("scriptLocation")).toString
29+
scriptLocation = Paths.get(ShellExecutorConfig.getExecutablesFolder, jobParameters.variables("scriptLocation")).toString
3030
)
3131
}

src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/spark/SparkParameters.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ package za.co.absa.hyperdrive.trigger.scheduler.executors.spark
1818
import java.nio.file.Paths
1919

2020
import za.co.absa.hyperdrive.trigger.models.JobParameters
21-
import za.co.absa.hyperdrive.trigger.scheduler.utilities.ExecutorsConfig
21+
import za.co.absa.hyperdrive.trigger.scheduler.utilities.SparkExecutorConfig
2222

2323
import scala.util.Try
2424

@@ -38,12 +38,12 @@ case class SparkParameters(
3838
object SparkParameters {
3939
def apply(jobParameters: JobParameters): SparkParameters = {
4040
SparkParameters(
41-
jobJar = Paths.get(ExecutorsConfig.getExecutablesFolder, jobParameters.variables("jobJar")).toString,
41+
jobJar = Paths.get(SparkExecutorConfig.getExecutablesFolder, jobParameters.variables("jobJar")).toString,
4242
mainClass = jobParameters.variables("mainClass"),
4343
deploymentMode = jobParameters.variables("deploymentMode"),
4444
appArguments = Try(jobParameters.maps("appArguments")).getOrElse(List.empty[String]),
45-
additionalJars = Try(jobParameters.maps("additionalJars")).getOrElse(List.empty[String]).map(jar => Paths.get(ExecutorsConfig.getExecutablesFolder, jar).toString),
46-
additionalFiles = Try(jobParameters.maps("additionalFiles")).getOrElse(List.empty[String]).map(file => Paths.get(ExecutorsConfig.getExecutablesFolder, file).toString),
45+
additionalJars = Try(jobParameters.maps("additionalJars")).getOrElse(List.empty[String]).map(jar => Paths.get(SparkExecutorConfig.getExecutablesFolder, jar).toString),
46+
additionalFiles = Try(jobParameters.maps("additionalFiles")).getOrElse(List.empty[String]).map(file => Paths.get(SparkExecutorConfig.getExecutablesFolder, file).toString),
4747
additionalSparkConfig = Try(jobParameters.keyValuePairs("additionalSparkConfig")).getOrElse(Map.empty[String, String])
4848
)
4949
}

src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/utilities/Configs.scala

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,8 +88,11 @@ object SchedulerConfig {
8888
object ExecutorsConfig {
8989
val getThreadPoolSize: Int =
9090
Configs.conf.getInt("scheduler.executors.thread.pool.size")
91+
}
92+
93+
object ShellExecutorConfig {
9194
val getExecutablesFolder: String =
92-
Configs.conf.getString("scheduler.executors.executablesFolder")
95+
Configs.conf.getString("shellExecutor.executablesFolder")
9396
}
9497

9598
object SparkExecutorConfig {
@@ -107,6 +110,8 @@ object SparkExecutorConfig {
107110
Try(Configs.conf.getString("sparkYarnSink.filesToDeploy").split(",").toSeq).getOrElse(Seq.empty[String])
108111
val getAdditionalConfs: Map[String, String] =
109112
Configs.getMapFromConf("sparkYarnSink.additionalConfs")
113+
val getExecutablesFolder: String =
114+
Configs.conf.getString("sparkYarnSink.executablesFolder")
110115
}
111116

112117
object JobDefinitionConfig {

src/test/resources/application.properties

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,12 @@ auth.inmemory.password=password
2020

2121
appUniqueId=9c282190-4078-4380-8960-ce52f43b94fg
2222

23-
scheduler.executors.executablesFolder=src/test/resources/
2423
scheduler.sensors.changedSensorsChunkQuerySize=100
2524

25+
shellExecutor.executablesFolder=src/test/resources/
26+
27+
sparkYarnSink.executablesFolder=src/test/resources/
28+
2629
kafkaSource.group.id.prefix=hyper_drive
2730
kafkaSource.properties.security.protocol=PLAINTEXT
2831

0 commit comments

Comments (0)