Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/develop' into feature/githubactions_112524
Browse files Browse the repository at this point in the history
  • Loading branch information
D-Pankey committed Dec 9, 2024
2 parents 5fc5c3c + 7d131b3 commit a457196
Show file tree
Hide file tree
Showing 10 changed files with 81 additions and 84 deletions.
2 changes: 1 addition & 1 deletion beagle/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "1.87.0"
__version__ = "1.88.0"
10 changes: 8 additions & 2 deletions file_system/helper/access_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,14 @@ def construct_csv(self):
"""

# get fastq metadata for a given request
fastqs = File.objects.filter(file_group__slug="lims", request_id__in=self.request_ids)
fastq_metadata = [fastq.filemetadata_set.values()[0]["metadata"] for fastq in fastqs]
files = FileRepository.filter(
file_group=settings.IMPORT_FILE_GROUP,
metadata={settings.IGO_COMPLETE_METADATA_KEY: True},
filter_redact=True,
).all()
fastqs = files.filter(metadata__igoRequestId__in=self.request_ids)
fastq_metadata = [fastq.metadata for fastq in fastqs]
fastq_metadata = sorted(fastq_metadata, key=lambda d: d["cmoSampleName"])
cmoPatientId = set([fastq["cmoPatientId"] for fastq in fastq_metadata])
# get DMP BAM file group
dmp_bams = FileRepository.filter(file_group=settings.DMP_BAM_FILE_GROUP)
Expand Down
30 changes: 16 additions & 14 deletions runner/models.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
import os
import uuid
from enum import IntEnum
from django.db import models
from django.db.models import F
from django.db import models, transaction
from file_system.models import File, FileGroup, Sample, Request
from beagle_etl.models import Operator, JobGroup, JobGroupNotifier
from django.contrib.postgres.fields import JSONField
Expand Down Expand Up @@ -316,18 +315,21 @@ def save(self, *args, **kwargs):
# TODO do we want to decrement if a job goes from completed/failed to open or failed to complete?
# We can also a prevent a job from going to open once it's in a closed state
if self.operator_run and self.original["status"] != self.status:
if self.status == RunStatus.COMPLETED:
self.operator_run.increment_completed_run()
self.original["status"] = RunStatus.COMPLETED
self.finished_date = now()
elif self.status == RunStatus.FAILED:
self.operator_run.increment_failed_run()
self.original["status"] = RunStatus.FAILED
self.finished_date = now()
elif self.status == RunStatus.TERMINATED:
self.operator_run.increment_failed_run()
self.original["status"] = RunStatus.TERMINATED
self.finished_date = now()
with transaction.atomic():
oparator_run = OperatorRun.objects.select_for_update().get(id=self.operator_run.id)
if self.status == RunStatus.COMPLETED:
oparator_run.increment_completed_run()
self.original["status"] = RunStatus.COMPLETED
self.finished_date = now()
elif self.status == RunStatus.FAILED:
oparator_run.increment_failed_run()
self.original["status"] = RunStatus.FAILED
self.finished_date = now()
elif self.status == RunStatus.TERMINATED:
oparator_run.increment_failed_run()
self.original["status"] = RunStatus.TERMINATED
self.finished_date = now()
oparator_run.save()
super(Run, self).save(*args, **kwargs)


Expand Down
26 changes: 13 additions & 13 deletions runner/operator/access/heme/qc/input_template.json.jinja2
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,15 @@
"samples-json": {{samples_json_content}},
"multiqc_config": {
"class": "File",
"location": "juno:///work/cch/production/resources/cmo-ch/versions/v1.0/multiqc_config/versions/v1.0/config_ch.yaml",
"location": "juno:///work/cch/production/resources/cmo-ch/versions/v1.0/multiqc_config/versions/v1.0/config_ch.yaml"
},
"biometrics_bed_file": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/accessH_probes_Y_only.bed",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/accessH_probes_Y_only.bed"
},
"biometrics_vcf_file": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/MSK-ACCESS-v1_0-TilingaAndFpSNPs_viral.vcf",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/MSK-ACCESS-v1_0-TilingaAndFpSNPs_viral.vcf"
},
"biometrics_json": true,
"biometrics_plot": true,
Expand All @@ -27,37 +27,37 @@
"hsmetrics_minimum_mapping_quality": 1,
"noise_sites_bed": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/MSK-Heme-v1_0_probe.bed",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/MSK-Heme-v1_0_probe.bed"
},
"bait_intervals": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/accessH_probes_final_viral.interval_list",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/accessH_probes_final_viral.interval_list"
},
"target_intervals": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/accessH_targets_final_viral.interval_list",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/accessH_targets_final_viral.interval_list"
},
"reference": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/reference/versions/hg19_virus_special/hg19_virus.fasta",
"secondaryFiles": [
{
"class": "File",
"location": "juno:///juno/work/access/production/resources/reference/versions/hg19_virus_special/hg19_virus.fasta.fai",
"location": "juno:///juno/work/access/production/resources/reference/versions/hg19_virus_special/hg19_virus.fasta.fai"
},
{
"class": "File",
"location": "juno:///juno/work/access/production/resources/reference/versions/hg19_virus_special/hg19_virus.dict",
},
],
"location": "juno:///juno/work/access/production/resources/reference/versions/hg19_virus_special/hg19_virus.dict"
}
]
},
"hotspots_maf": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/hotspot-list-union-v1-v2_with_TERT_with_aa.maf",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/hotspot-list-union-v1-v2_with_TERT_with_aa.maf"
},
"mosdepth_bed": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/MSK-Heme-v1_0_canonicaltargets.bed",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/MSK-Heme-v1_0_canonicaltargets.bed"
},
"sequence_qc_min_basq": 1,
"sequence_qc_min_mapq": 1,
Expand All @@ -79,5 +79,5 @@
"fragment_count": 1,
"filter_duplicate": 0,
"omaf": true,
"generic_counting": true,
"generic_counting": true
}
26 changes: 13 additions & 13 deletions runner/operator/access/v2_1_0/qc/input_template.json.jinja2
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,15 @@
"samples-json": {{samples_json_content}},
"multiqc_config": {
"class": "File",
"location": "juno:///work/access/production/resources/msk-access/v2.0/multi_qc/v1.0/config_accessv2.yaml",
"location": "juno:///work/access/production/resources/msk-access/v2.0/multi_qc/v1.0/config_accessv2.yaml"
},
"biometrics_bed_file": {
"class": "File",
"location": "juno:///work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/MSK-ACCESS-v2_probes_Y_only.bed",
"location": "juno:///work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/MSK-ACCESS-v2_probes_Y_only.bed"
},
"biometrics_vcf_file": {
"class": "File",
"location": "juno:///work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/MSK-ACCESS-v2_0-TilingaAndFpSNPs.vcf",
"location": "juno:///work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/MSK-ACCESS-v2_0-TilingaAndFpSNPs.vcf"
},
"biometrics_json": true,
"biometrics_plot": true,
Expand All @@ -27,37 +27,37 @@
"hsmetrics_minimum_mapping_quality": 1,
"noise_sites_bed": {
"class": "File",
"location": "juno:///work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/MSK-ACCESS-v2_targetsAllwFP.bed",
"location": "juno:///work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/MSK-ACCESS-v2_targetsAllwFP.bed"
},
"bait_intervals": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/MSK-ACCESS-v2_probesAllwFP.interval_list",
"location": "juno:///juno/work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/MSK-ACCESS-v2_probesAllwFP.interval_list"
},
"target_intervals": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/MSK-ACCESS-v2_canonicaltargets.interval_list",
"location": "juno:///juno/work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/MSK-ACCESS-v2_canonicaltargets.interval_list"
},
"reference": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/reference/versions/hg19_virus_special/hg19_virus.fasta",
"secondaryFiles": [
{
"class": "File",
"location": "juno:///juno/work/access/production/resources/reference/versions/hg19_virus_special/hg19_virus.fasta.fai",
"location": "juno:///juno/work/access/production/resources/reference/versions/hg19_virus_special/hg19_virus.fasta.fai"
},
{
"class": "File",
"location": "juno:///juno/work/access/production/resources/reference/versions/hg19_virus_special/hg19_virus.dict",
},
],
"location": "juno:///juno/work/access/production/resources/reference/versions/hg19_virus_special/hg19_virus.dict"
}
]
},
"hotspots_maf": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/hotspot-list-union-v1-v2_with_TERT_with_aa.maf",
"location": "juno:///juno/work/access/production/resources/msk-access/v2.0/regions_of_interest/versions/v1.0/hotspot-list-union-v1-v2_with_TERT_with_aa.maf"
},
"mosdepth_bed": {
"class": "File",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/MSK-Heme-v1_0_canonicaltargets.bed",
"location": "juno:///juno/work/access/production/resources/tools/resources/access-heme/region_of_interest/versions/v1.0/MSK-Heme-v1_0_canonicaltargets.bed"
},
"sequence_qc_min_basq": 1,
"sequence_qc_min_mapq": 1,
Expand All @@ -79,5 +79,5 @@
"fragment_count": 1,
"filter_duplicate": 0,
"omaf": true,
"generic_counting": true,
"generic_counting": true
}
16 changes: 8 additions & 8 deletions runner/operator/argos_bam_operator/v1_0_0/argos_bam_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@


class ArgosBamOperator(Operator):
ARGOS_NAME = "argos"
ARGOS_NAME = "argosBam"
ARGOS_VERSION = "1.6.1"

def get_jobs(self):
Expand Down Expand Up @@ -118,15 +118,15 @@ def get_argos_jobs(self, argos_inputs):
pipeline_id = self.get_pipeline_id()
pipeline = Pipeline.objects.get(id=pipeline_id)
argos_bam_job_data = {"app": pipeline_id, "inputs": job, "name": name, "tags": tags}
output_dir = os.path.join(pipeline.output_directory, "argosBam", get_project_prefix(self.request_id))
if self.output_directory_prefix:
tags["output_directory_prefix"] = self.output_directory_prefix
if self.job_group_id:
jg = JobGroup.objects.get(id=self.job_group_id)
jg_created_date = jg.created_date.strftime("%Y%m%d_%H_%M_%f")
output_directory = os.path.join(
pipeline.output_directory, "argosBam", output_prefix, pipeline.version, jg_created_date
)
argos_bam_job_data["output_directory"] = output_directory
output_dir = os.path.join(pipeline.output_directory, "argosBam", output_prefix)
if self.job_group_id:
jg = JobGroup.objects.get(id=self.job_group_id)
jg_created_date = jg.created_date.strftime("%Y%m%d_%H_%M_%f")
output_directory = os.path.join(output_dir, pipeline.version, jg_created_date)
argos_bam_job_data["output_directory"] = output_directory
argos_jobs.append(RunCreator(**argos_bam_job_data))
return argos_jobs

Expand Down
16 changes: 8 additions & 8 deletions runner/operator/chronos_operator/chronos_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def get_jobs(self, pairing_override=None):
igocomplete_query = Q(metadata__igoComplete=True)
missing_fields_query = self.filter_out_missing_fields_query()
q = recipe_query & assay_query & igocomplete_query & missing_fields_query
tempo_files = FileRepository.filter(queryset=FileRepository.all(), q=q, file_group=settings.IMPORT_FILE_GROUP)
tempo_files = FileRepository.filter(queryset=FileRepository.all(), q=q, file_group=self.file_group)
tempo_files = FileRepository.filter(queryset=tempo_files, filter_redact=True)

self.send_message(
Expand Down Expand Up @@ -154,7 +154,7 @@ def get_jobs(self, pairing_override=None):
settings.REQUEST_ID_METADATA_KEY: self.request_id,
settings.TUMOR_OR_NORMAL_METADATA_KEY: "Tumor",
},
file_group=settings.IMPORT_FILE_GROUP,
file_group=self.file_group,
values_metadata=settings.CMO_SAMPLE_TAG_METADATA_KEY,
)
used_normals = set()
Expand All @@ -168,7 +168,7 @@ def get_jobs(self, pairing_override=None):
mapping = self.get_mapping_for_pair(tumor, pairing["normal"], mapping_all, used_normals)
normal_request_id = FileRepository.filter(
metadata={settings.SAMPLE_ID_METADATA_KEY: pairing["normal"]},
file_group=settings.IMPORT_FILE_GROUP,
file_group=self.file_group,
values_metadata=settings.REQUEST_ID_METADATA_KEY,
)
used_normals_requests.add(normal_request_id)
Expand Down Expand Up @@ -243,22 +243,22 @@ def get_jobs(self, pairing_override=None):
continue
patient_id = FileRepository.filter(
metadata={settings.CMO_SAMPLE_TAG_METADATA_KEY: sample},
file_group=settings.IMPORT_FILE_GROUP,
file_group=self.file_group,
values_metadata=settings.PATIENT_ID_METADATA_KEY,
).first()
request_id = FileRepository.filter(
metadata={settings.CMO_SAMPLE_TAG_METADATA_KEY: sample},
file_group=settings.IMPORT_FILE_GROUP,
file_group=self.file_group,
values_metadata=settings.REQUEST_ID_METADATA_KEY,
).first()
gene_panel = FileRepository.filter(
metadata={settings.CMO_SAMPLE_TAG_METADATA_KEY: sample},
file_group=settings.IMPORT_FILE_GROUP,
file_group=self.file_group,
values_metadata=settings.RECIPE_METADATA_KEY,
).first()
primary_id = FileRepository.filter(
metadata={settings.CMO_SAMPLE_TAG_METADATA_KEY: sample},
file_group=settings.IMPORT_FILE_GROUP,
file_group=self.file_group,
values_metadata=settings.SAMPLE_ID_METADATA_KEY,
).first()
job_tags = copy.deepcopy(tags)
Expand Down Expand Up @@ -404,7 +404,7 @@ def get_exclusions(self):
def get_ci_tag(self, primary_id):
return FileRepository.filter(
metadata={settings.SAMPLE_ID_METADATA_KEY: primary_id},
file_group=settings.IMPORT_FILE_GROUP,
file_group=self.file_group,
values_metadata=settings.CMO_SAMPLE_TAG_METADATA_KEY,
).first()

Expand Down
11 changes: 3 additions & 8 deletions runner/operator/manifest/access_manifest_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,18 +98,13 @@ def write_to_file(self, fname, s):
Writes manifest csv to temporary location, registers it as tmp file
:return: manifest csv path
"""
# Split the string into rows using "\r\n" as the delimiter
rows = s.split("\r\n")
# Split each row into columns using "," as the delimiter
data = [row.split(",") for row in rows]
# tmp file creation
# output path
tmpdir = os.path.join(settings.BEAGLE_SHARED_TMPDIR, str(uuid.uuid4()))
Path(tmpdir).mkdir(parents=True, exist_ok=True)
output = os.path.join(tmpdir, fname)
# write csv to tmp file group
with open(output, "w+", newline="") as csvfile:
writer = csv.writer(csvfile)
writer.writerows(data)
with open(output, mode="w", encoding="utf-8", newline="") as file:
file.write(s)
# register output as tmp file
self.register_temp_file(output)
# return with juno formatting
Expand Down
7 changes: 3 additions & 4 deletions runner/operator/operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,9 @@ def __init__(
self.job_group_notifier_id = job_group_notifier_id
self.run_ids = run_ids
self.file_group = file_group
if self.file_group:
self.files = FileRepository.filter(file_group=self.file_group).all()
else:
self.files = FileRepository.filter(file_group=settings.IMPORT_FILE_GROUP).all()
if not self.file_group:
self.file_group = settings.IMPORT_FILE_GROUP
self.files = FileRepository.filter(file_group=self.file_group).all()
self.pairing = pairing
# {"pairs": [{"tumor": "tumorSampleName", "normal": "normalSampleName"}]}
self.output_directory_prefix = output_directory_prefix
Expand Down
Loading

0 comments on commit a457196

Please sign in to comment.