diff --git a/src/api/workflows/main.go b/src/api/workflows/main.go
index 24903d8..de50089 100644
--- a/src/api/workflows/main.go
+++ b/src/api/workflows/main.go
@@ -13,14 +13,20 @@ var WORKFLOW_VARIANT_SCHEMA WorkflowSchema = map[string]interface{}{
 			"name":        "Compressed-VCF Elasticsearch Indexing",
 			"description": "This ingestion workflow will validate and ingest a BGZip-Compressed-VCF into Elasticsearch.",
 			"data_type":   "variant",
+			"tags":        []string{"variant"},
 			"file":        "vcf_gz.wdl",
-			"action":      "ingestion",
+			"type":        "ingestion",
 			"inputs": []map[string]interface{}{
 				{
-					"id":         "vcf_gz_file_names",
-					"type":       "file[]",
-					"required":   true,
-					"extensions": []string{".vcf.gz"},
+					"id":       "project_dataset",
+					"type":     "project:dataset",
+					"required": true,
+				},
+				{
+					"id":       "vcf_gz_file_names",
+					"type":     "file[]",
+					"required": true,
+					"pattern":  "^.*\\.vcf\\.gz$",
 				},
 				{
 					"id": "assembly_id",
@@ -37,26 +43,15 @@ var WORKFLOW_VARIANT_SCHEMA WorkflowSchema = map[string]interface{}{
 					"default": "false",
 				},
 				{
-					"id":       "gohan_url",
-					"type":     "string",
-					"required": true,
-					"value":    "FROM_CONFIG",
-					"hidden":   true,
-				},
-			},
-			"outputs": []map[string]interface{}{
-				{
-					"id":    "txt_output",
-					"type":  "file",
-					"value": "{txt_output}",
-				},
-				{
-					"id":    "err_output",
-					"type":  "file",
-					"value": "{err_output}",
+					"id":           "gohan_url",
+					"type":         "service-kind",
+					"required":     true,
+					"injected":     true,
+					"service_kind": "gohan",
 				},
 			},
 		},
 	},
 	"analysis": map[string]interface{}{},
+	"export":   map[string]interface{}{},
 }
diff --git a/src/api/workflows/vcf_gz.wdl b/src/api/workflows/vcf_gz.wdl
index 343f3de..af51b7b 100644
--- a/src/api/workflows/vcf_gz.wdl
+++ b/src/api/workflows/vcf_gz.wdl
@@ -2,24 +2,37 @@ workflow vcf_gz {
     String gohan_url
     Array[File] vcf_gz_file_names
     String assembly_id
-    String project_id
-    String dataset_id
+    String project_dataset
     String filter_out_references
-    String secret__access_token
+    String access_token
+
+    call project_and_dataset_id {
+        input: project_dataset = project_dataset
+    }
 
     scatter(file_name in vcf_gz_file_names) {
         call vcf_gz_gohan {
             input: gohan_url = gohan_url,
                 vcf_gz_file_name = file_name,
                 assembly_id = assembly_id,
-                project = project_id,
-                dataset = dataset_id,
+                project = project_and_dataset_id.out[0],
+                dataset = project_and_dataset_id.out[1],
                 filter_out_references = filter_out_references,
-                access_token = secret__access_token,
+                access_token = access_token,
         }
     }
 }
 
+task project_and_dataset_id {
+    input {
+        String project_dataset
+    }
+    command <<< python3 -c 'import json; print(json.dumps("~{project_dataset}".split(":")))' >>>
+    output {
+        Array[String] out = read_json(stdout())
+    }
+}
+
 task vcf_gz_gohan {
     String gohan_url
     String vcf_gz_file_name