diff --git a/build.sbt b/build.sbt index 7de5b327..d6505f97 100644 --- a/build.sbt +++ b/build.sbt @@ -2,14 +2,14 @@ import scala.util.Properties name := """bdg-sequila""" -version := "0.5.3-spark-2.3.3-SNAPSHOT" +version := "0.5.3-spark-2.3.2.3.1.0.0-78-SNAPSHOT" organization := "org.biodatageeks" scalaVersion := "2.11.8" -val DEFAULT_SPARK_2_VERSION = "2.3.3" -val DEFAULT_HADOOP_VERSION = "2.6.5" +val DEFAULT_SPARK_2_VERSION = "2.3.2.3.1.0.0-78" +val DEFAULT_HADOOP_VERSION = "3.1.1" lazy val sparkVersion = Properties.envOrElse("SPARK_VERSION", DEFAULT_SPARK_2_VERSION) @@ -30,14 +30,14 @@ libraryDependencies += "com.holdenkarau" % "spark-testing-base_2.11" % "2.3.2_0. //libraryDependencies += "org.apache.spark" %% "spark-hive" % "2.0.0" % "test" -libraryDependencies += "org.bdgenomics.adam" %% "adam-core-spark2" % "0.24.0" -libraryDependencies += "org.bdgenomics.adam" %% "adam-apis-spark2" % "0.24.0" -libraryDependencies += "org.bdgenomics.adam" %% "adam-cli-spark2" % "0.24.0" +libraryDependencies += "org.bdgenomics.adam" %% "adam-core-spark2" % "0.24.0" excludeAll (ExclusionRule("org.apache.hadoop")) +libraryDependencies += "org.bdgenomics.adam" %% "adam-apis-spark2" % "0.24.0" excludeAll (ExclusionRule("org.apache.hadoop")) +libraryDependencies += "org.bdgenomics.adam" %% "adam-cli-spark2" % "0.24.0" excludeAll (ExclusionRule("org.apache.hadoop")) libraryDependencies += "org.scala-lang" % "scala-library" % "2.11.8" libraryDependencies += "org.rogach" %% "scallop" % "3.1.2" -libraryDependencies += "org.hammerlab.bdg-utils" %% "cli" % "0.3.0" +libraryDependencies += "org.hammerlab.bdg-utils" %% "cli" % "0.3.0" excludeAll (ExclusionRule("org.apache.hadoop")) libraryDependencies += "com.github.samtools" % "htsjdk" % "2.18.2" @@ -51,7 +51,7 @@ libraryDependencies += "org.apache.logging.log4j" % "log4j-api" % "2.11.0" libraryDependencies += "com.intel.gkl" % "gkl" % "0.8.5-1-darwin-SNAPSHOT" libraryDependencies += "com.intel.gkl" % "gkl" % "0.8.5-1-linux-SNAPSHOT" 
-libraryDependencies += "org.hammerlab.bam" %% "load" % "1.2.0-M1" +libraryDependencies += "org.hammerlab.bam" %% "load" % "1.2.0-M1" excludeAll (ExclusionRule("org.apache.hadoop")) libraryDependencies += "de.ruedigermoeller" % "fst" % "2.57" libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.7" @@ -59,7 +59,7 @@ libraryDependencies += "org.eclipse.jetty" % "jetty-servlet" % "9.3.24.v20180605 libraryDependencies += "org.apache.derby" % "derbyclient" % "10.14.2.0" -libraryDependencies += "org.biodatageeks" % "bdg-performance_2.11" % "0.2-spark-2.3.3-SNAPSHOT" excludeAll (ExclusionRule("org.apache.hadoop")) +//libraryDependencies += "org.biodatageeks" % "bdg-performance_2.11" % "0.2-spark-2.3.3-SNAPSHOT" excludeAll (ExclusionRule("org.apache.hadoop")) @@ -90,7 +90,7 @@ resolvers ++= Seq( "Job Server Bintray" at "https://dl.bintray.com/spark-jobserver/maven", "zsibio-snapshots" at "http://zsibio.ii.pw.edu.pl/nexus/repository/maven-snapshots/", "spring" at "http://repo.spring.io/libs-milestone/", - "Cloudera" at "https://repository.cloudera.com/content/repositories/releases/", + "confluent" at "https://packages.confluent.io/maven/", "Hortonworks" at "http://repo.hortonworks.com/content/repositories/releases/" ) @@ -102,10 +102,12 @@ assemblyMergeStrategy in assembly := { case PathList("com", xs@_*) => MergeStrategy.first case PathList("shadeio", xs@_*) => MergeStrategy.first + case PathList(ps @ _*) if ps.last endsWith ".class" => MergeStrategy.first case PathList("au", xs@_*) => MergeStrategy.first case ("META-INF/org/apache/logging/log4j/core/config/plugins/Log4j2Plugins.dat") => MergeStrategy.first case ("images/ant_logo_large.gif") => MergeStrategy.first + case "overview.html" => MergeStrategy.rename case "mapred-default.xml" => MergeStrategy.last case "about.html" => MergeStrategy.rename diff --git a/src/main/scala/org/biodatageeks/hive/ThriftServerPageSeq.scala b/src/main/scala/org/biodatageeks/hive/ThriftServerPageSeq.scala index 
976f23ce..af784621 100644 --- a/src/main/scala/org/biodatageeks/hive/ThriftServerPageSeq.scala +++ b/src/main/scala/org/biodatageeks/hive/ThriftServerPageSeq.scala @@ -27,10 +27,10 @@ private[ui] class ThriftServerPageSeq(parent: ThriftServerTabSeq) extends WebUIP {listener.getOnlineSessionNum} session(s) are online, running {listener.getTotalRunning} SQL statement(s) ++ - generateSessionStatsTable() ++ - generateSQLStatsTable() + generateSessionStatsTable(request) ++ + generateSQLStatsTable(request) } - UIUtils.headerSparkPage("JDBC/ODBC Server", content, parent, Some(5000)) + UIUtils.headerSparkPage(request, "JDBC/ODBC Server", content, parent, Some(5000)) } /** Generate basic stats of the thrift server program */ @@ -47,16 +47,16 @@ private[ui] class ThriftServerPageSeq(parent: ThriftServerTabSeq) extends WebUIP } /** Generate stats of batch statements of the thrift server program */ - private def generateSQLStatsTable(): Seq[Node] = { + private def generateSQLStatsTable(request: HttpServletRequest): Seq[Node] = { val numStatement = listener.getExecutionList.size val table = if (numStatement > 0) { val headerRow = Seq("User", "JobID", "GroupID", "Start Time", "Finish Time", "Duration", "Statement", "State", "Detail") - val dataRows = listener.getExecutionList + val dataRows = listener.getExecutionList.sortBy(_.startTimestamp).reverse def generateDataRow(info: ExecutionInfo): Seq[Node] = { val jobLink = info.jobId.map { id: String => - <a href={"%s/jobs/job?id=%s".format(UIUtils.prependBaseUri(parent.basePath), id)}> + <a href={"%s/jobs/job?id=%s".format(UIUtils.prependBaseUri(request, parent.basePath), id)}> [{id}] } @@ -118,16 +118,17 @@ private[ui] class ThriftServerPageSeq(parent: ThriftServerTabSeq) extends WebUIP } /** Generate stats of batch sessions of the thrift server program */ - private def generateSessionStatsTable(): Seq[Node] = { + private def generateSessionStatsTable(request: HttpServletRequest): Seq[Node] = { val sessionList = listener.getSessionList val numBatches = sessionList.size val table = if (numBatches > 0) { - val dataRows = sessionList + val dataRows = sessionList.sortBy(_.startTimestamp).reverse val 
headerRow = Seq("User", "IP", "Session ID", "Start Time", "Finish Time", "Duration", "Total Execute") + def generateDataRow(session: SessionInfo): Seq[Node] = { val sessionLink = "%s/%s/session?id=%s" - .format(UIUtils.prependBaseUri(parent.basePath), parent.prefix, session.sessionId) + .format(UIUtils.prependBaseUri(request, parent.basePath), parent.prefix, session.sessionId)