diff --git a/CHANGELOG.md b/CHANGELOG.md
index 375654a328b..541f8740103 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,43 @@
 # Cromwell Change Log
 
+## 75 Release Notes
+
+### New `AwaitingCloudQuota` backend status
+
+For Cloud Life Sciences v2beta only.
+
+When a user's GCP project reaches a quota limit, Cromwell continues to submit jobs, and Life Sciences acknowledges them as created even if the physical VM cannot yet start. Cromwell now detects this condition in the backend and reports `AwaitingCloudQuota`.
+
+The status is informational and does not require any action. Users wishing to maximize throughput can treat `AwaitingCloudQuota` as an indication that they should check quota in the Cloud Console and request a quota increase from GCP.
+
+`AwaitingCloudQuota` will appear between the `Initializing` and `Running` backend statuses, and will be skipped when not applicable.
+
+Now:
+
+| Status in metadata | Quota normal | Quota delay          | Status meaning                                    |
+|--------------------|--------------|----------------------|---------------------------------------------------|
+| `executionStatus`  | `Running`    | `Running`            | Job state Cromwell is requesting from the backend |
+| `backendStatus`    | `Running`    | `AwaitingCloudQuota` | Job state reported by the backend                 |
+
+Previously:
+
+| Status in metadata | Quota normal | Quota delay | Status meaning                                    |
+|--------------------|--------------|-------------|---------------------------------------------------|
+| `executionStatus`  | `Running`    | `Running`   | Job state Cromwell is requesting from the backend |
+| `backendStatus`    | `Running`    | `Running`   | Job state reported by the backend                 |
+
+### New `requestedWorkflowId` API option
+
+Allows users to choose their own workflow IDs at workflow submission time.
+
+If supplied for single workflows, this value must be a JSON string containing a valid UUID that is not already in use. For batch submissions, this value must be a JSON array of such UUIDs.
+
+If not supplied, the behavior is unchanged: Cromwell generates a random workflow ID for every workflow submitted.
+
+### Bug Fixes
+
+* Fixed a bug on Google Pipelines API backends where missing optional output files (`File?`) were not correctly detected by Cromwell and caused invalid call cache entries to be written.
+
 ## 73 Release Notes
 
 ### Workflow Restart Performance Improvements
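To make the accepted formats concrete, here is a minimal sketch of the parsing rules the release notes describe, written with spray-json (already a Cromwell dependency). The `parseRequestedIds` helper is hypothetical and illustrative only; the real server-side validation additionally rejects IDs that already exist in the workflow store.

```scala
import java.util.UUID
import scala.util.Try
import spray.json._

object RequestedWorkflowIdSketch {
  // A single submission sends a JSON string; a batch submission sends a
  // JSON array of strings. Each element must parse as a UUID.
  def parseRequestedIds(raw: String): Try[List[UUID]] = Try {
    raw.parseJson match {
      case JsString(id) => List(UUID.fromString(id))
      case JsArray(elements) =>
        elements.toList.map {
          case JsString(id) => UUID.fromString(id)
          case other => throw new IllegalArgumentException(s"Expected a UUID string, got $other")
        }
      case other => throw new IllegalArgumentException(s"Expected a JSON string or array, got $other")
    }
  }
}
```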
diff --git a/CromIAM/src/main/resources/swagger/cromiam.yaml b/CromIAM/src/main/resources/swagger/cromiam.yaml
index 47b56fc8cb1..c6bf57fa609 100644
--- a/CromIAM/src/main/resources/swagger/cromiam.yaml
+++ b/CromIAM/src/main/resources/swagger/cromiam.yaml
@@ -135,6 +135,11 @@ paths:
         required: false
         type: file
         in: formData
+      - name: requestedWorkflowId
+        description: An ID to assign to this workflow. Must be a JSON string in UUID format. If not supplied, a random ID will be generated for the workflow.
+        required: false
+        type: string
+        in: formData
       tags:
         - Workflows
       responses:
@@ -198,6 +203,11 @@ paths:
         required: false
         type: file
         in: formData
+      - name: requestedWorkflowId
+        description: A set of IDs to assign to these workflows. Must be a JSON list of strings in UUID format. Must have the same number of entries and be in the same order as the workflow inputs list. If not supplied, random IDs will be generated for the workflows.
+        required: false
+        type: string
+        in: formData
       tags:
         - Workflows
       responses:
diff --git a/backend/src/test/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManagerSpec.scala b/backend/src/test/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManagerSpec.scala
index 66ead4a76ac..fdfa7f97e04 100644
--- a/backend/src/test/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManagerSpec.scala
+++ b/backend/src/test/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManagerSpec.scala
@@ -23,7 +23,8 @@ class CallCachingBlacklistManagerSpec extends AnyFlatSpec with CromwellTimeoutSp
     workflowOptions = WorkflowOptions(JsObject.empty),
     labelsJson = "",
     workflowOnHold = false,
-    warnings = List.empty
+    warnings = List.empty,
+    requestedWorkflowId = None
   )
 
   val workflowSourcesYesGrouping = workflowSourcesNoGrouping.copy(
diff --git a/centaur/src/main/resources/standardTestCases/backendWithNoDocker.test b/centaur/src/main/resources/standardTestCases/backendWithNoDocker.test
index 7c2ef56192f..493cd9988d8 100644
--- a/centaur/src/main/resources/standardTestCases/backendWithNoDocker.test
+++ b/centaur/src/main/resources/standardTestCases/backendWithNoDocker.test
@@ -2,6 +2,9 @@ name: backendWithNoDocker
 backends: [LocalNoDocker]
 testFormat: runtwiceexpectingcallcaching
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: backendWithNoDocker/backendWithNoDocker.wdl
 }
diff --git a/centaur/src/main/resources/standardTestCases/cacheBetweenWf.test b/centaur/src/main/resources/standardTestCases/cacheBetweenWf.test
index c366b0c22bd..53c1847c686 100644
--- a/centaur/src/main/resources/standardTestCases/cacheBetweenWf.test
+++ b/centaur/src/main/resources/standardTestCases/cacheBetweenWf.test
@@ -1,6 +1,9 @@
 name: cacheBetweenWF
 testFormat: runtwiceexpectingcallcaching
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: cacheBetweenWF/cacheBetweenWF.wdl
   options: common_options/cache_read_off_write_on.options
diff --git a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_empty_hint_papi.test b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_empty_hint_papi.test
index d9c7ee419ed..f148f234dba 100644
--- a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_empty_hint_papi.test
+++ b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_empty_hint_papi.test
@@ -3,6 +3,9 @@ name: call_cache_hit_prefixes_empty_hint_papi
 testFormat: runtwiceexpectingcallcaching
 backends: [Papi]
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: call_cache_hit_prefixes/call_cache_hit_prefixes.wdl
   inputs: call_cache_hit_prefixes/call_cache_hit_prefixes_empty_hint.inputs
diff --git a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_no_hint.test b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_no_hint.test
index cd35a78dbee..e4aca42b5cf 100644
--- a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_no_hint.test
+++ b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_no_hint.test
@@ -2,6 +2,9 @@ name: call_cache_hit_prefixes_no_hint
 testFormat: runtwiceexpectingcallcaching
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: call_cache_hit_prefixes/call_cache_hit_prefixes.wdl
   inputs: call_cache_hit_prefixes/call_cache_hit_prefixes_no_hint.inputs
diff --git a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_two_roots_empty_hint_cache_hit_papi.test b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_two_roots_empty_hint_cache_hit_papi.test
index a0ef536165e..8558f7fec8a 100644
--- a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_two_roots_empty_hint_cache_hit_papi.test
+++ b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_two_roots_empty_hint_cache_hit_papi.test
@@ -5,6 +5,9 @@ name: call_cache_hit_prefixes_two_roots_empty_hint_cache_hit_papi
 testFormat: runthriceexpectingcallcaching
 backends: [Papi]
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: call_cache_hit_prefixes/call_cache_hit_prefixes.wdl
   inputs: call_cache_hit_prefixes/call_cache_hit_prefixes_two_roots_empty_hint_hit_papi.inputs
diff --git a/centaur/src/main/resources/standardTestCases/cwl_cache_between_workflows.test b/centaur/src/main/resources/standardTestCases/cwl_cache_between_workflows.test
index e5381017829..1e3b6065617 100644
--- a/centaur/src/main/resources/standardTestCases/cwl_cache_between_workflows.test
+++ b/centaur/src/main/resources/standardTestCases/cwl_cache_between_workflows.test
@@ -5,6 +5,9 @@ workflowType: CWL
 workflowTypeVersion: v1.0
 skipDescribeEndpointValidation: true
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: cwl_cache_between_workflows/cwl_cache_between_workflows.cwl
   inputs: cwl_cache_between_workflows/cwl_cache_between_workflows.json
diff --git a/centaur/src/main/resources/standardTestCases/floating_tags.test b/centaur/src/main/resources/standardTestCases/floating_tags.test
index fc2c077d0a2..f4be0030b4f 100644
--- a/centaur/src/main/resources/standardTestCases/floating_tags.test
+++ b/centaur/src/main/resources/standardTestCases/floating_tags.test
@@ -1,6 +1,9 @@
 name: floating_tags
 testFormat: runtwiceexpectingcallcaching
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: floating_tags/floating_tags.wdl
   options: floating_tags/floating_tags.options
diff --git a/centaur/src/main/resources/standardTestCases/fofn_caching.test b/centaur/src/main/resources/standardTestCases/fofn_caching.test
index 1864bce29d5..115f3dd476b 100644
--- a/centaur/src/main/resources/standardTestCases/fofn_caching.test
+++ b/centaur/src/main/resources/standardTestCases/fofn_caching.test
@@ -2,6 +2,9 @@ name: fofn_caching
 testFormat: runtwiceexpectingcallcaching
 backends: [Papi-Caching-No-Copy]
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: fofn_caching/fofn_caching.wdl
 }
diff --git a/centaur/src/main/resources/standardTestCases/google_artifact_registry.test b/centaur/src/main/resources/standardTestCases/google_artifact_registry.test
index 44d1e2a725c..384dede4973 100644
--- a/centaur/src/main/resources/standardTestCases/google_artifact_registry.test
+++ b/centaur/src/main/resources/standardTestCases/google_artifact_registry.test
@@ -1,6 +1,9 @@
 name: google_artifact_registry
 testFormat: runtwiceexpectingcallcaching
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: google_artifact_registry/google_artifact_registry.wdl
 }
diff --git a/centaur/src/main/resources/standardTestCases/hello_private_repo.test b/centaur/src/main/resources/standardTestCases/hello_private_repo.test
index 00be69dd7f9..5ec7aa6a46e 100644
--- a/centaur/src/main/resources/standardTestCases/hello_private_repo.test
+++ b/centaur/src/main/resources/standardTestCases/hello_private_repo.test
@@ -2,6 +2,9 @@ name: hello_private_repo
 testFormat: runtwiceexpectingcallcaching
 backends: [LocalDockerSecure]
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: hello_private_repo/hello_private_repo.wdl
   inputs: hello_private_repo/hello_private_repo.inputs.json
diff --git a/centaur/src/main/resources/standardTestCases/use_cacheCopy_dir.test b/centaur/src/main/resources/standardTestCases/use_cacheCopy_dir.test
index 1b33b90fe22..70bfef2594f 100644
--- a/centaur/src/main/resources/standardTestCases/use_cacheCopy_dir.test
+++ b/centaur/src/main/resources/standardTestCases/use_cacheCopy_dir.test
@@ -2,6 +2,9 @@ name: use_cache_copy_dir
 testFormat: runtwiceexpectingcallcaching
 backends: [Papiv2]
 
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
 files {
   workflow: use_cacheCopy_dir/use_cacheCopy_dir.wdl
 }
diff --git a/centaur/src/main/resources/standardTestCases/wdl_optional_outputs_call_caching.test b/centaur/src/main/resources/standardTestCases/wdl_optional_outputs_call_caching.test
new file mode 100644
index 00000000000..5a982fc2f03
--- /dev/null
+++ b/centaur/src/main/resources/standardTestCases/wdl_optional_outputs_call_caching.test
@@ -0,0 +1,16 @@
+name: wdl_optional_outputs_call_caching
+testFormat: runtwiceexpectingcallcaching
+backends: [Papiv2]
+
+# CROM-6807 Don't retry failures; subsequent runs will fail because of unexpected cache hits from the initial run
+retryTestFailures: false
+
+files {
+  workflow: wdl_optional_outputs_call_caching/wdl_optional_outputs_call_caching.wdl
+}
+
+metadata {
+  workflowName: missing_optional_output
+  status: Succeeded
+  "calls.missing_optional_output.do_and_do_not_output.callCaching.result": "Cache Hit: <>:missing_optional_output.do_and_do_not_output:-1"
+}
diff --git a/centaur/src/main/resources/standardTestCases/wdl_optional_outputs_call_caching/wdl_optional_outputs_call_caching.wdl b/centaur/src/main/resources/standardTestCases/wdl_optional_outputs_call_caching/wdl_optional_outputs_call_caching.wdl
new file mode 100644
index 00000000000..f9ac709042e
--- /dev/null
+++ b/centaur/src/main/resources/standardTestCases/wdl_optional_outputs_call_caching/wdl_optional_outputs_call_caching.wdl
@@ -0,0 +1,22 @@
+version 1.0
+
+task do_and_do_not_output {
+  command <<<
+    touch do_output.txt
+  >>>
+  runtime {
+    docker: "ubuntu"
+  }
+  output {
+    File? do_not_output = "do_not_output.txt"
+    File? do_output = "do_output.txt"
+  }
+}
+
+workflow missing_optional_output {
+  call do_and_do_not_output
+  output {
+    File? should_be_present = do_and_do_not_output.do_output
+    File? should_be_null = do_and_do_not_output.do_not_output
+  }
+}
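The new WDL test above exercises the bug fix end to end: `do_not_output.txt` is never created, so its `File?` output must resolve to null on both runs for the cache hit to validate. Conceptually, detecting a missing optional output reduces to a check like the following sketch (a hypothetical helper for illustration, not the actual Pipelines API backend code):

```scala
import java.nio.file.{Files, Path, Paths}

object OptionalOutputSketch {
  // A declared `File?` output resolves to Some(path) when the task produced
  // the file and to None when it did not; only a required `File` should fail.
  def resolveOptionalOutput(callRoot: Path, declared: String): Option[Path] = {
    val candidate = callRoot.resolve(declared)
    if (Files.exists(candidate)) Option(candidate) else None
  }

  // e.g. resolveOptionalOutput(Paths.get("/call-root"), "do_not_output.txt") == None
}
```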
diff --git a/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala b/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala
index 9359eab5430..ee936b86819 100644
--- a/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala
+++ b/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala
@@ -16,6 +16,7 @@ sealed trait WorkflowSourceFilesCollection {
   def workflowType: Option[WorkflowType]
   def workflowTypeVersion: Option[WorkflowTypeVersion]
   def workflowOnHold: Boolean
+  def requestedWorkflowId: Option[WorkflowId]
 
   def warnings: Seq[String]
 
@@ -49,7 +50,8 @@ object WorkflowSourceFilesCollection {
             labelsJson: WorkflowJson,
             importsFile: Option[Array[Byte]],
             workflowOnHold: Boolean,
-            warnings: Seq[String]): WorkflowSourceFilesCollection = importsFile match {
+            warnings: Seq[String],
+            requestedWorkflowId: Option[WorkflowId]): WorkflowSourceFilesCollection = importsFile match {
     case Some(imports) =>
       WorkflowSourceFilesWithDependenciesZip(
         workflowSource = workflowSource,
@@ -62,7 +64,8 @@ object WorkflowSourceFilesCollection {
         labelsJson = labelsJson,
         importsZip = imports,
         workflowOnHold = workflowOnHold,
-        warnings = warnings)
+        warnings = warnings,
+        requestedWorkflowId = requestedWorkflowId)
     case None =>
       WorkflowSourceFilesWithoutImports(
         workflowSource = workflowSource,
@@ -74,7 +77,8 @@ object WorkflowSourceFilesCollection {
         workflowOptions = workflowOptions,
         labelsJson = labelsJson,
         workflowOnHold = workflowOnHold,
-        warnings = warnings)
+        warnings = warnings,
+        requestedWorkflowId = requestedWorkflowId)
   }
 }
 
@@ -87,7 +91,8 @@ final case class WorkflowSourceFilesWithoutImports(workflowSource: Option[Workfl
                                                    workflowOptions: WorkflowOptions,
                                                    labelsJson: WorkflowJson,
                                                    workflowOnHold: Boolean = false,
-                                                   warnings: Seq[String]) extends WorkflowSourceFilesCollection
+                                                   warnings: Seq[String],
+                                                   requestedWorkflowId: Option[WorkflowId]) extends WorkflowSourceFilesCollection
 
 final case class WorkflowSourceFilesWithDependenciesZip(workflowSource: Option[WorkflowSource],
                                                         workflowUrl: Option[WorkflowUrl],
@@ -99,7 +104,8 @@ final case class WorkflowSourceFilesWithDependenciesZip(workflowSource: Option[W
                                                         labelsJson: WorkflowJson,
                                                         importsZip: Array[Byte],
                                                         workflowOnHold: Boolean = false,
-                                                        warnings: Seq[String]) extends WorkflowSourceFilesCollection {
+                                                        warnings: Seq[String],
+                                                        requestedWorkflowId: Option[WorkflowId]) extends WorkflowSourceFilesCollection {
   override def toString = {
     s"WorkflowSourceFilesWithDependenciesZip($workflowSource, $workflowUrl, $workflowType, $workflowTypeVersion," +
       s""" $inputsJson, ${workflowOptions.asPrettyJson}, $labelsJson, <>, $warnings)"""
diff --git a/core/src/test/scala/cromwell/util/SampleWdl.scala b/core/src/test/scala/cromwell/util/SampleWdl.scala
index d040a883e11..4b2f773bf63 100644
--- a/core/src/test/scala/cromwell/util/SampleWdl.scala
+++ b/core/src/test/scala/cromwell/util/SampleWdl.scala
@@ -44,7 +44,8 @@ trait SampleWdl extends TestFileUtil {
           workflowTypeVersion = workflowTypeVersion,
           warnings = Vector.empty,
           workflowOnHold = workflowOnHold,
-          importsZip = zip)
+          importsZip = zip,
+          requestedWorkflowId = None)
       case None =>
         WorkflowSourceFilesWithoutImports(
           workflowSource = Option(workflowSource(runtime)),
@@ -56,7 +57,8 @@ trait SampleWdl extends TestFileUtil {
           workflowType = workflowType,
           workflowTypeVersion = workflowTypeVersion,
           warnings = Vector.empty,
-          workflowOnHold = workflowOnHold)
+          workflowOnHold = workflowOnHold,
+          requestedWorkflowId = None)
     }
   }
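Every `WorkflowSourceFilesCollection` now threads the optional requested ID through to workflow materialization. The fallback the release notes describe amounts to the following sketch (the `WorkflowId` case class here is a minimal stand-in for `cromwell.core.WorkflowId`, shown only for illustration):

```scala
import java.util.UUID

object RequestedIdFallbackSketch {
  // Minimal stand-in for cromwell.core.WorkflowId.
  final case class WorkflowId(id: UUID) { override def toString: String = id.toString }

  // Use the caller-requested ID when present; otherwise keep the old
  // behavior and mint a random workflow ID.
  def effectiveId(requestedWorkflowId: Option[WorkflowId]): WorkflowId =
    requestedWorkflowId.getOrElse(WorkflowId(UUID.randomUUID()))
}
```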
diff --git a/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala
index e54974f6618..f95e3d1a7ae 100644
--- a/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala
+++ b/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala
@@ -1,18 +1,17 @@
 package cromwell.database.slick
 
-import java.sql.{Connection, PreparedStatement, Statement}
-import java.util.concurrent.{ExecutorService, Executors}
-
 import com.mysql.cj.jdbc.exceptions.MySQLTransactionRollbackException
 import com.typesafe.config.{Config, ConfigFactory}
 import cromwell.database.slick.tables.DataAccessComponent
 import cromwell.database.sql.SqlDatabase
 import net.ceedubs.ficus.Ficus._
-import org.postgresql.util.{PSQLException, ServerErrorMessage}
-import org.slf4j.LoggerFactory
+import org.postgresql.util.PSQLException
+import org.slf4j.{Logger, LoggerFactory}
 import slick.basic.DatabaseConfig
 import slick.jdbc.{JdbcCapabilities, JdbcProfile, PostgresProfile, TransactionIsolation}
 
+import java.sql.{Connection, PreparedStatement, Statement}
+import java.util.concurrent.{ExecutorService, Executors}
 import scala.concurrent.duration._
 import scala.concurrent.{Await, ExecutionContext, Future}
 
@@ -20,9 +19,9 @@ object SlickDatabase {
   /**
     * Returns either the "url" or "properties.url"
     */
-  def urlKey(config: Config) = if (config.hasPath("db.url")) "db.url" else "db.properties.url"
+  def urlKey(config: Config): String = if (config.hasPath("db.url")) "db.url" else "db.properties.url"
 
-  lazy val log = LoggerFactory.getLogger("cromwell.database.slick")
+  lazy val log: Logger = LoggerFactory.getLogger("cromwell.database.slick")
 
   def createSchema(slickDatabase: SlickDatabase): Unit = {
     // NOTE: Slick 3.0.0 schema creation, Clobs, and MySQL don't mix: https://github.com/slick/slick/issues/637
@@ -57,7 +56,7 @@ object SlickDatabase {
   */
 abstract class SlickDatabase(override val originalDatabaseConfig: Config) extends SqlDatabase {
 
-  override val urlKey = SlickDatabase.urlKey(originalDatabaseConfig)
+  override val urlKey: String = SlickDatabase.urlKey(originalDatabaseConfig)
   protected val slickConfig = DatabaseConfig.forConfig[JdbcProfile]("", databaseConfig)
 
   /*
@@ -73,7 +72,7 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend
   // NOTE: if you want to refactor database is inner-class type: this.dataAccess.driver.backend.DatabaseFactory
   val database = slickConfig.db
 
-  override lazy val connectionDescription = databaseConfig.getString(urlKey)
+  override lazy val connectionDescription: String = databaseConfig.getString(urlKey)
 
   SlickDatabase.log.info(s"Running with database $urlKey = $connectionDescription")
 
@@ -134,10 +133,12 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend
     actionThreadPool, database.executor.executionContext.reportFailure
   )
 
-  protected[this] lazy val insertBatchSize = databaseConfig.getOrElse("insert-batch-size", 2000)
+  protected[this] lazy val insertBatchSize: Int = databaseConfig.getOrElse("insert-batch-size", 2000)
 
-  protected[this] lazy val useSlickUpserts = dataAccess.driver.capabilities.contains(JdbcCapabilities.insertOrUpdate)
+  protected[this] lazy val useSlickUpserts: Boolean =
+    dataAccess.driver.capabilities.contains(JdbcCapabilities.insertOrUpdate)
 
+  //noinspection SameParameterValue
   protected[this] def assertUpdateCount(description: String, updates: Int, expected: Int): DBIO[Unit] = {
     if (updates == expected) {
       DBIO.successful(())
@@ -220,20 +221,11 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend
           /*
           The exception may contain possibly sensitive row contents within the DETAIL section. Remove it.
-          Tried adjusting this using configuration:
-          - log_error_verbosity=TERSE
-          - log_min_messages=PANIC
-          - client_min_messages=ERROR
-
-          Instead resorting to reflection.
+          Discussion: https://github.com/pgjdbc/pgjdbc/issues/1577
            */
           val message = pSQLException.getServerErrorMessage
-          val field = classOf[ServerErrorMessage].getDeclaredField("mesgParts")
-          field.setAccessible(true)
-          val parts = field.get(message).asInstanceOf[java.util.Map[Character, String]]
-          parts.remove('D')
           // The original exception has already stored the DETAIL into a string. So we must create a new Exception.
-          throw new PSQLException(message)
+          throw new PSQLException(message, false)
         }
       }
     }(actionExecutionContext)
diff --git a/database/sql/src/main/scala/cromwell/database/slick/WorkflowStoreSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/WorkflowStoreSlickDatabase.scala
index e0e19ee97aa..5d88d0dbec8 100644
--- a/database/sql/src/main/scala/cromwell/database/slick/WorkflowStoreSlickDatabase.scala
+++ b/database/sql/src/main/scala/cromwell/database/slick/WorkflowStoreSlickDatabase.scala
@@ -154,4 +154,8 @@ trait WorkflowStoreSlickDatabase extends WorkflowStoreSqlDatabase {
   override def findWorkflows(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[String]] = {
     runTransaction(dataAccess.findWorkflows(cromwellId).result)
   }
+
+  override def checkWhetherWorkflowExists(workflowId: String)(implicit ec: ExecutionContext): Future[Boolean] = {
+    runTransaction(dataAccess.checkExists(workflowId).result.map(_.nonEmpty))
+  }
 }
diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala
index 829911af13e..3fb6ec48cdd 100644
--- a/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala
+++ b/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala
@@ -178,6 +178,7 @@ trait WorkflowStoreEntryComponent {
     }
   )
 
+  // Find workflows running on a given Cromwell instance with abort requested:
   val findWorkflowsWithAbortRequested = Compiled(
     (cromwellId: Rep[String]) => for {
       workflowStoreEntry <- workflowStoreEntries
@@ -185,10 +186,18 @@ trait WorkflowStoreEntryComponent {
     } yield workflowStoreEntry.workflowExecutionUuid
   )
 
+  // Find workflows running on a given Cromwell instance:
   val findWorkflows = Compiled(
     (cromwellId: Rep[String]) => for {
       workflowStoreEntry <- workflowStoreEntries
       if workflowStoreEntry.cromwellId === cromwellId
     } yield workflowStoreEntry.workflowExecutionUuid
   )
+
+  val checkExists = Compiled(
+    (workflowId: Rep[String]) => (for {
+      workflowStoreEntry <- workflowStoreEntries
+      if workflowStoreEntry.workflowExecutionUuid === workflowId
+    } yield 1)
+  )
 }
diff --git a/database/sql/src/main/scala/cromwell/database/sql/WorkflowStoreSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/WorkflowStoreSqlDatabase.scala
index 821db0ac9c6..a8312f6d685 100644
--- a/database/sql/src/main/scala/cromwell/database/sql/WorkflowStoreSqlDatabase.scala
+++ b/database/sql/src/main/scala/cromwell/database/sql/WorkflowStoreSqlDatabase.scala
@@ -104,4 +104,6 @@ ____ __ ____ ______ .______ __ ___ _______ __ ______
 
   def findWorkflows(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[String]]
 
+  def checkWhetherWorkflowExists(workflowId: String)(implicit ec: ExecutionContext): Future[Boolean]
+
 }
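For context on the new query: `checkExists` selects a constant `1` per matching row, and `checkWhetherWorkflowExists` tests the fetched result for `nonEmpty`. An equivalent formulation with Slick's `exists` combinator, which compiles to `SELECT EXISTS(...)` instead of fetching rows, might look like this sketch (the pared-down table definition is hypothetical; the real schema lives in `WorkflowStoreEntryComponent`):

```scala
import slick.jdbc.H2Profile.api._

object WorkflowExistsSketch {
  // Hypothetical one-column stand-in for the workflow store table.
  class WorkflowStoreEntries(tag: Tag) extends Table[String](tag, "WORKFLOW_STORE_ENTRY") {
    def workflowExecutionUuid = column[String]("WORKFLOW_EXECUTION_UUID")
    def * = workflowExecutionUuid
  }
  val workflowStoreEntries = TableQuery[WorkflowStoreEntries]

  // Compiles to: SELECT EXISTS(SELECT ... WHERE WORKFLOW_EXECUTION_UUID = ?)
  def checkExistsAction(workflowId: String): DBIO[Boolean] =
    workflowStoreEntries.filter(_.workflowExecutionUuid === workflowId).exists.result
}
```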
diff --git a/docs/api/RESTAPI.md b/docs/api/RESTAPI.md
index 5731f363a09..d74f09a00d7 100644
--- a/docs/api/RESTAPI.md
+++ b/docs/api/RESTAPI.md
@@ -1,5 +1,5 @@