Skip to content
This repository has been archived by the owner on Nov 28, 2020. It is now read-only.

Commit

Permalink
Fixing deps
Browse files · Browse the repository at this point in the history
  • Loading branch information
mwiewior committed Mar 10, 2019
1 parent 37948e7 commit dedb948
Show file tree
Hide file tree
Showing 3 changed files with 22 additions and 23 deletions.
16 changes: 8 additions & 8 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,13 @@ import scala.util.Properties

name := """bdg-sequila"""

- version := "0.5.3-spark-2.4.0-SNAPSHOT"
+ version := "0.5.3-spark-2.3.3-SNAPSHOT"

organization := "org.biodatageeks"

scalaVersion := "2.11.8"

- val DEFAULT_SPARK_2_VERSION = "2.4.0"
+ val DEFAULT_SPARK_2_VERSION = "2.3.3"
val DEFAULT_HADOOP_VERSION = "2.6.5"


Expand All @@ -24,15 +24,15 @@ libraryDependencies += "org.apache.spark" % "spark-core_2.11" % sparkVersion

libraryDependencies += "org.apache.spark" % "spark-sql_2.11" % sparkVersion
libraryDependencies += "org.apache.spark" %% "spark-hive" % sparkVersion
- libraryDependencies += "org.apache.spark" %% "spark-hive-thriftserver" % "2.4.0"
+ libraryDependencies += "org.apache.spark" %% "spark-hive-thriftserver" % sparkVersion

- libraryDependencies += "com.holdenkarau" % "spark-testing-base_2.11" % "2.4.0_0.11.0" % "test" excludeAll ExclusionRule(organization = "javax.servlet") excludeAll (ExclusionRule("org.apache.hadoop"))
+ libraryDependencies += "com.holdenkarau" % "spark-testing-base_2.11" % "2.3.2_0.11.0" % "test" excludeAll ExclusionRule(organization = "javax.servlet") excludeAll (ExclusionRule("org.apache.hadoop"))

//libraryDependencies += "org.apache.spark" %% "spark-hive" % "2.0.0" % "test"

- libraryDependencies += "org.bdgenomics.adam" %% "adam-core-spark2" % "0.25.0"
- libraryDependencies += "org.bdgenomics.adam" %% "adam-apis-spark2" % "0.25.0"
- libraryDependencies += "org.bdgenomics.adam" %% "adam-cli-spark2" % "0.25.0"
+ libraryDependencies += "org.bdgenomics.adam" %% "adam-core-spark2" % "0.24.0"
+ libraryDependencies += "org.bdgenomics.adam" %% "adam-apis-spark2" % "0.24.0"
+ libraryDependencies += "org.bdgenomics.adam" %% "adam-cli-spark2" % "0.24.0"
libraryDependencies += "org.scala-lang" % "scala-library" % "2.11.8"
libraryDependencies += "org.rogach" %% "scallop" % "3.1.2"

Expand All @@ -59,7 +59,7 @@ libraryDependencies += "org.eclipse.jetty" % "jetty-servlet" % "9.3.24.v20180605
libraryDependencies += "org.apache.derby" % "derbyclient" % "10.14.2.0"


- libraryDependencies += "org.biodatageeks" % "bdg-performance_2.11" % "0.2-SNAPSHOT" excludeAll (ExclusionRule("org.apache.hadoop"))
+ libraryDependencies += "org.biodatageeks" % "bdg-performance_2.11" % "0.2-spark-2.3.3-SNAPSHOT" excludeAll (ExclusionRule("org.apache.hadoop"))



Expand Down
21 changes: 10 additions & 11 deletions src/main/scala/org/biodatageeks/hive/ThriftServerPageSeq.scala
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,10 @@ private[ui] class ThriftServerPageSeq(parent: ThriftServerTabSeq) extends WebUIP
{listener.getOnlineSessionNum} session(s) are online,
running {listener.getTotalRunning} SQL statement(s)
</h4> ++
-       generateSessionStatsTable(request) ++
-       generateSQLStatsTable(request)
+       generateSessionStatsTable() ++
+       generateSQLStatsTable()
}
-     UIUtils.headerSparkPage(request,"JDBC/ODBC Server", content, parent, Some(5000))
+     UIUtils.headerSparkPage("JDBC/ODBC Server", content, parent, Some(5000))
}

/** Generate basic stats of the thrift server program */
Expand All @@ -47,16 +47,16 @@ private[ui] class ThriftServerPageSeq(parent: ThriftServerTabSeq) extends WebUIP
}

/** Generate stats of batch statements of the thrift server program */
-   private def generateSQLStatsTable(request: HttpServletRequest): Seq[Node] = {
+   private def generateSQLStatsTable(): Seq[Node] = {
val numStatement = listener.getExecutionList.size
val table = if (numStatement > 0) {
val headerRow = Seq("User", "JobID", "GroupID", "Start Time", "Finish Time", "Duration",
"Statement", "State", "Detail")
-       val dataRows = listener.getExecutionList.sortBy(_.startTimestamp).reverse
+       val dataRows = listener.getExecutionList

def generateDataRow(info: ExecutionInfo): Seq[Node] = {
val jobLink = info.jobId.map { id: String =>
-           <a href={"%s/jobs/job?id=%s".format(UIUtils.prependBaseUri(request,parent.basePath), id)}>
+           <a href={"%s/jobs/job?id=%s".format(UIUtils.prependBaseUri(parent.basePath), id)}>
[{id}]
</a>
}
Expand Down Expand Up @@ -118,17 +118,16 @@ private[ui] class ThriftServerPageSeq(parent: ThriftServerTabSeq) extends WebUIP
}

/** Generate stats of batch sessions of the thrift server program */
-   private def generateSessionStatsTable(request: HttpServletRequest): Seq[Node] = {
+   private def generateSessionStatsTable(): Seq[Node] = {
val sessionList = listener.getSessionList
val numBatches = sessionList.size
val table = if (numBatches > 0) {
-       val dataRows = sessionList.sortBy(_.startTimestamp).reverse
+       val dataRows = sessionList
val headerRow = Seq("User", "IP", "Session ID", "Start Time", "Finish Time", "Duration",
"Total Execute")

def generateDataRow(session: SessionInfo): Seq[Node] = {
val sessionLink = "%s/%s/session?id=%s"
-           .format(UIUtils.prependBaseUri(request,parent.basePath), parent.prefix, session.sessionId)
+           .format(UIUtils.prependBaseUri(parent.basePath), parent.prefix, session.sessionId)
<tr>
<td> {session.userName} </td>
<td> {session.ip} </td>
Expand Down Expand Up @@ -170,4 +169,4 @@ private[ui] class ThriftServerPageSeq(parent: ThriftServerTabSeq) extends WebUIP
}
UIUtils.listingTable(headers, generateDataRow, data, fixedWidth = true)
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,9 @@ private[ui] class ThriftServerSessionPageSeq(parent: ThriftServerTabSeq)
Session created at {formatDate(sessionStat.startTimestamp)},
Total run {sessionStat.totalExecution} SQL
</h4> ++
-       generateSQLStatsTable(request,sessionStat.sessionId)
+       generateSQLStatsTable(sessionStat.sessionId)
}
-     UIUtils.headerSparkPage(request,"JDBC/ODBC Session", content, parent, Some(5000))
+     UIUtils.headerSparkPage("JDBC/ODBC Session", content, parent, Some(5000))
}

/** Generate basic stats of the thrift server program */
Expand All @@ -59,7 +59,7 @@ private[ui] class ThriftServerSessionPageSeq(parent: ThriftServerTabSeq)
}

/** Generate stats of batch statements of the thrift server program */
-   private def generateSQLStatsTable(request: HttpServletRequest, sessionID: String): Seq[Node] = {
+   private def generateSQLStatsTable(sessionID: String): Seq[Node] = {
val executionList = listener.getExecutionList
.filter(_.sessionId == sessionID)
val numStatement = executionList.size
Expand All @@ -70,7 +70,7 @@ private[ui] class ThriftServerSessionPageSeq(parent: ThriftServerTabSeq)

def generateDataRow(info: ExecutionInfo): Seq[Node] = {
val jobLink = info.jobId.map { id: String =>
-           <a href={"%s/jobs/job?id=%s".format(UIUtils.prependBaseUri(request,parent.basePath), id)}>
+           <a href={"%s/jobs/job?id=%s".format(UIUtils.prependBaseUri(parent.basePath), id)}>
[{id}]
</a>
}
Expand Down

0 comments on commit dedb948

Please sign in to comment.