Skip to content

Commit

Permalink
Merge branch 'develop'
Browse files Browse the repository at this point in the history
  • Loading branch information
kshakir committed Sep 16, 2019
2 parents d46ff9f + b6111f6 commit b61c136
Show file tree
Hide file tree
Showing 189 changed files with 4,327 additions and 2,225 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ tags
.idea/inspectionProfiles/*
!/.idea/inspectionProfiles/Project_Default.xml
target
/site

# custom config
cromwell-executions
Expand Down
15 changes: 12 additions & 3 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -92,9 +92,18 @@ env:
BUILD_MYSQL=5.7
- >-
BUILD_TYPE=sbt
BUILD_MYSQL=5.7
BUILD_POSTGRESQL=11.3
BUILD_MARIADB=10.3
# The below list of docker tags should be synced with the tags in DatabaseTestKit.getDatabaseSystemSettings
- >-
BUILD_TYPE=dbms
BUILD_MARIADB=5.5
BUILD_MARIADB_LATEST=latest
BUILD_MYSQL=5.6
BUILD_MYSQL_LATEST=latest
BUILD_POSTGRESQL=9.5
BUILD_POSTGRESQL_LATEST=latest
# The above list of docker tags should be kept in sync with the tags in DatabaseTestKit.getDatabaseSystemSettings
- >-
BUILD_TYPE=singleWorkflowRunner
script:
- src/ci/bin/test.sh
notifications:
Expand Down
27 changes: 27 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,25 @@
# Cromwell Change Log

## 46 Release Notes

### Nvidia GPU Driver Update

The default driver for Nvidia GPUs on Google Cloud has been updated from `390` to `418.87.00`. A user may override this option at any time by providing the `nvidiaDriverVersion` runtime attribute. See the [Runtime Attribute description for GPUs](https://cromwell.readthedocs.io/en/stable/RuntimeAttributes/#runtime-attribute-descriptions) for detailed information.

### Enhanced "error code 10" handling in PAPIv2

On Google Pipelines API v2, a worker VM that is preempted may emit a generic error message like
```
PAPI error code 10. The assigned worker has failed to complete the operation
```
instead of a preemption-specific message like
```
PAPI error code 14. Task was preempted for the 2nd time.
```
Cromwell 44 introduced special handling that detects both preemption indicators and re-runs the job consistent with the `preemptible` setting.

Cromwell 46 enhances this handling in response to user reports of possible continued issues.

## 45 Release Notes

### Improved input and output transfer performance on PAPI v2
Expand All @@ -25,6 +45,13 @@ Globs can be used to define outputs for BCS backend.
#### NAS mount
Alibaba Cloud NAS is now supported for the `mounts` runtime attribute.

### Call Caching Failure Messages [(#5095)](https://github.com/broadinstitute/cromwell/pull/5095)

Call cache failures are no longer sent to the workflow metadata. Instead, a limited number of call cache failure messages
will be sent to the workflow log. See [the Cromwell call caching
documentation](https://cromwell.readthedocs.io/en/stable/cromwell_features/CallCaching/) for more information on call
cache failure logging.

## 44 Release Notes

### Improved PAPI v2 Preemptible VM Support
Expand Down
4 changes: 3 additions & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -383,6 +383,7 @@ lazy val root = (project in file("."))
.aggregate(`cloud-nio-impl-ftp`)
.aggregate(`cloud-nio-spi`)
.aggregate(`cloud-nio-util`)
.aggregate(`cromwell-drs-localizer`)
.aggregate(awsBackend)
.aggregate(awsS3FileSystem)
.aggregate(backend)
Expand All @@ -400,13 +401,13 @@ lazy val root = (project in file("."))
.aggregate(databaseSql)
.aggregate(dockerHashing)
.aggregate(drsFileSystem)
.aggregate(`cromwell-drs-localizer`)
.aggregate(engine)
.aggregate(ftpFileSystem)
.aggregate(gcsFileSystem)
.aggregate(googlePipelinesCommon)
.aggregate(googlePipelinesV1Alpha2)
.aggregate(googlePipelinesV2Alpha1)
.aggregate(httpFileSystem)
.aggregate(jesBackend)
.aggregate(languageFactoryCore)
.aggregate(ossFileSystem)
Expand All @@ -433,3 +434,4 @@ lazy val root = (project in file("."))
.aggregate(wes2cromwell)
.aggregate(wom)
.aggregate(womtool)
.withAggregateSettings()
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ task sum {
File out = "file.md5"
}
runtime {
docker: "ubuntu:latest"
docker: "ubuntu@sha256:d1d454df0f579c6be4d8161d227462d69e163a8ff9d20a847533989cf0c94d90"
}
}

Expand All @@ -18,7 +18,7 @@ task cromwell_killer {
echo restarting yo
}
runtime {
docker: "ubuntu:latest"
docker: "ubuntu@sha256:d1d454df0f579c6be4d8161d227462d69e163a8ff9d20a847533989cf0c94d90"
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Centaur test case: submits a CWL v1.0 workflow whose two inputs are each
# declared as a [string, File] type union, and asserts the workflow succeeds
# with the expected basename-only outputs for both a File-typed value
# (input.txt) and a plain-string value (nonexistent_path.txt).
name: cwl_input_typearray
testFormat: workflowsuccess
workflowType: CWL
workflowTypeVersion: v1.0
workflowRoot: input_typearray

files {
workflow: cwl_input_typearray/input_typearray.cwl
inputs: cwl_input_typearray/input_typearray.yml
}

metadata {
"submittedFiles.workflowType": CWL
"submittedFiles.workflowTypeVersion": v1.0
"outputs.input_typearray.response_f": "input.txt"
"outputs.input_typearray.response_s": "nonexistent_path.txt"
}
Empty file.
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
# CWL v1.0 tool exercising union input types ([string, File]).
# It echoes both inputs (positions 1 and 2) plus a trailing "sentinel"
# argument, captures stdout to response.txt, and each output re-parses the
# corresponding echoed value, keeping only its last path component (basename).
cwlVersion: v1.0
$graph:
- id: input_typearray
cwlVersion: v1.0
class: CommandLineTool
baseCommand: ['/bin/echo']
stdout: "response.txt"
requirements:
- class: DockerRequirement
dockerPull: "ubuntu:latest"
# InlineJavascriptRequirement is needed for the outputEval expressions below.
- class: InlineJavascriptRequirement
arguments:
# Fixed trailing token so stdout always has a third, known field.
- position: 3
valueFrom: "sentinel"
inputs:
value_f:
type:
- string
- File
inputBinding:
position: 1
doc: "an input to test with a File value"
value_s:
type:
- string
- File
inputBinding:
position: 2
doc: "an input to test with a string value"
outputs:
# First space-separated field of response.txt, reduced to its basename.
response_f:
type: string
outputBinding:
glob: response.txt
loadContents: true
outputEval: $(self[0].contents.split(" ")[0].split("/").slice(-1)[0])
# Second space-separated field of response.txt, reduced to its basename.
response_s:
type: string
outputBinding:
glob: response.txt
loadContents: true
outputEval: $(self[0].contents.split(" ")[1].split("/").slice(-1)[0])
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
value_f:
class: File
path: "centaur/src/main/resources/standardTestCases/cwl_input_typearray/input.txt"
value_s: "centaur/src/main/resources/standardTestCases/cwl_input_typearray/nonexistent_path.txt"
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Centaur test case: on PAPI v2, verifies that a file wired into two inputs of
# the same task is localized only once — the workflow's check_log task counts
# "Localizing input ..." log lines for the file and must find exactly 1.
name: dedup_localizations_papi_v2
testFormat: workflowsuccess
backends: [Papiv2]

files {
workflow: dedup_localizations_papi_v2/dedup_localizations_papi_v2.wdl
}

metadata {
workflowName: dedup_localizations_papi_v2
status: Succeeded
"outputs.dedup_localizations_papi_v2.check_log.num_input_localizations": 1
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
version 1.0

# Regression test for input-localization de-duplication on PAPI v2: the same
# producer output is passed to both inputs of `consumer`, and `check_log`
# counts how many times the backend logged a localization of that file
# (the accompanying .test config expects exactly 1).
workflow dedup_localizations_papi_v2 {
call producer
call consumer { input: first = producer.data, second = producer.data }
# consumer.out is consumer's stdout path; check_log derives the sibling
# log file path ("consumer.log") from it — see file_log below.
call check_log { input: out_file_path = consumer.out, log_file_name = "consumer.log" }
}

# Writes one small file and exposes it as a File output.
task producer {
command {
echo "Here is some data." > data.txt
}

runtime {
docker: "ubuntu:latest"
}

output {
File data = "data.txt"
}
}

# Receives the same file twice and does no work; its job log is what
# check_log inspects.
task consumer {
input {
File first
File second
}

command {
# noop
}

runtime {
docker: "ubuntu:latest"
}

output {
File out = stdout()
}
}

# Downloads the consumer's backend log and counts the "Localizing input ..."
# lines that mention data.txt.
task check_log {
input {
String out_file_path
String log_file_name
}
# Replace the trailing "/stdout" of the stdout path with "/<log_file_name>".
# NOTE(review): assumes the log file sits next to stdout in the same
# directory — confirm against the PAPI v2 backend's file layout.
String file_log = sub(out_file_path, "/stdout$", "/" + log_file_name)
command {
set -euo pipefail
gsutil cp ~{file_log} log.txt
set +e
grep 'Localizing input gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/travis/dedup_localizations_papi_v2/' log.txt | grep -c "data.txt"
}
output {
File out = stdout()
Int num_input_localizations = read_int(stdout())
}
runtime { docker: "google/cloud-sdk" }
}
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
version 1.0

workflow wf_level_file_size {
File input1 = "dos://wb-mock-drs-dev.storage.googleapis.com/4a3908ad-1f0b-4e2a-8a92-611f2123e8b0"
File input2 = "dos://wb-mock-drs-dev.storage.googleapis.com/0c8e7bc6-fd76-459d-947b-808b0605beb3"
File input1 = "drs://wb-mock-drs-dev.storage.googleapis.com/4a3908ad-1f0b-4e2a-8a92-611f2123e8b0"
File input2 = "drs://wb-mock-drs-dev.storage.googleapis.com/0c8e7bc6-fd76-459d-947b-808b0605beb3"

output {
Float fileSize1 = size(input1)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
{
# For all below 5 HCA uuids, Martha does not return a service account
"drs_usa_hca.localize_drs_with_usa.file1": "dos://service.staging.explore.data.humancellatlas.org/033c9840-c5cd-438b-b0e4-8e4cd8fc8dc6?version=2019-07-04T104122.106166Z",
"drs_usa_hca.localize_drs_with_usa.file2": "dos://service.staging.explore.data.humancellatlas.org/4defa7b0-46c2-4053-8e99-b827eed1bc96?version=2019-07-04T104122.100969Z",
"drs_usa_hca.localize_drs_with_usa.file3": "dos://service.staging.explore.data.humancellatlas.org/de5dcfc1-5aea-41ba-a7ae-e72c416cb450?version=2019-07-04T104122.092788Z",
"drs_usa_hca.localize_drs_with_usa.file4": "dos://service.staging.explore.data.humancellatlas.org/16dea2c5-e2bd-45bc-b2fd-fcac0daafc48?version=2019-07-04T104122.060634Z",
"drs_usa_hca.localize_drs_with_usa.file5": "dos://service.dev.explore.data.humancellatlas.org/7c800467-9143-402f-b965-4e7cad75c1e6?version=2019-05-26T130511.722646Z"
"drs_usa_hca.localize_drs_with_usa.file1": "drs://service.staging.explore.data.humancellatlas.org/033c9840-c5cd-438b-b0e4-8e4cd8fc8dc6?version=2019-07-04T104122.106166Z",
"drs_usa_hca.localize_drs_with_usa.file2": "drs://service.staging.explore.data.humancellatlas.org/4defa7b0-46c2-4053-8e99-b827eed1bc96?version=2019-07-04T104122.100969Z",
"drs_usa_hca.localize_drs_with_usa.file3": "drs://service.staging.explore.data.humancellatlas.org/de5dcfc1-5aea-41ba-a7ae-e72c416cb450?version=2019-07-04T104122.092788Z",
"drs_usa_hca.localize_drs_with_usa.file4": "drs://service.staging.explore.data.humancellatlas.org/16dea2c5-e2bd-45bc-b2fd-fcac0daafc48?version=2019-07-04T104122.060634Z",
"drs_usa_hca.localize_drs_with_usa.file5": "drs://service.dev.explore.data.humancellatlas.org/7c800467-9143-402f-b965-4e7cad75c1e6?version=2019-05-26T130511.722646Z"
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ version 1.0
workflow gpu_cuda_image {

input {
Array[String] driver_versions = [ "390.46" ]
Array[String] driver_versions = [ "418.87.00" ]
}

scatter (driver_version in driver_versions) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Centaur test case: on PAPI v2, sanity-checks input localization (including
# the bulk `gsutil cp -I` path noted in the workflow) by asserting that each
# expected input file appears exactly once in the task's working directory.
name: localization_sanity_papi_v2
testFormat: workflowsuccess
workflowType: WDL
workflowTypeVersion: 1.0
backends: [Papiv2]

files {
workflow: localization_sanity_papi_v2/localization_sanity_papi_v2.wdl
}

metadata {
status: Succeeded
"outputs.localization_sanity.sanity_check.lines.0": "file a.txt: 1"
"outputs.localization_sanity.sanity_check.lines.1": "file b.txt: 1"
"outputs.localization_sanity.sanity_check.lines.2": "file c.txt: 1"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
version 1.0

# Creates directories a/, b/ and c/, each holding dummy.txt plus a file named
# after its directory (a/a.txt, b/b.txt, c/c.txt).
task make_files {
command <<<
names=(a b c)
mkdir -p "${names[@]}"
for name in "${names[@]}"; do
touch "${name}/dummy.txt" # the first file is not bulk-transferred via `gsutil cp -I ...` which is what this test is about.
touch "${name}/${name}.txt"
done
>>>
output {
# Intentionally not globbed as the current implementation of globbing would defeat what this test
# is trying to assert.
Array[File] files = ["a/dummy.txt", "a/a.txt", "b/dummy.txt", "b/b.txt", "c/dummy.txt", "c/c.txt"]
}
runtime {
docker: "ubuntu:latest"
}
}

# Emits "file <name>.txt: <count>" for each of a.txt, b.txt, c.txt, where
# <count> is how many copies `find` locates under the working directory;
# the test config expects each count to be exactly 1.
task sanity_check {
input {
Array[File] files
}
command <<<
names=(a b c)
for name in "${names[@]}"; do
file="${name}.txt"
echo "file $file: $(find . -name $file | wc -l)"
done
>>>
output {
Array[String] lines = read_lines(stdout())
}
runtime {
docker: "ubuntu:latest"
}
}

workflow localization_sanity {
call make_files
call sanity_check { input: files = make_files.files }
}
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ tags: [ big_metadata ]

files {
workflow: lots_of_inputs/lots_of_inputs.wdl
inputs: lots_of_inputs/lots_of_inputs.inputs
}

metadata {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{ "lots_of_inputs.how_many_is_lots": 400 }
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,8 @@ task make_array {
}
workflow lots_of_inputs {
call make_array { input: n = 400 }
Int how_many_is_lots
call make_array { input: n = how_many_is_lots }
call do_nothing { input: f = make_array.a }
output {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{ "lots_of_inputs.how_many_is_lots": 10000 }
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# This test makes sure that:
# - 10000 output files are all found and collected by the glob() method
# - 10000 input files to a task don't make anything explode inappropriately
# (Scaled-up PAPI v2 variant of the lots_of_inputs test, which runs with 400.)
name: lots_of_inputs_papiv2
testFormat: workflowsuccess
tags: [ big_metadata ]
backends: [ Papiv2 ]

files {
workflow: lots_of_inputs/lots_of_inputs.wdl
inputs: lots_of_inputs/lots_of_inputs_papiv2.inputs
}

metadata {
workflowName: lots_of_inputs
status: Succeeded
"outputs.lots_of_inputs.out_count": "10000"
"outputs.lots_of_inputs.nothing_out": "no-op"
}
Loading

0 comments on commit b61c136

Please sign in to comment.