-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathadd_mosdepth.py
More file actions
executable file
·209 lines (179 loc) · 6.06 KB
/
add_mosdepth.py
File metadata and controls
executable file
·209 lines (179 loc) · 6.06 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
#!/usr/bin/env python3
import argparse
import json
from pathlib import Path
from datetime import datetime
def join_and_validate(date, time):
    """
    Combine a date and a time into a single timestamp and validate it.

    Args:
        date (str): Date in YYYY-MM-DD form.
        time (str): Time in HH:MM:SS form.

    Returns:
        str: The combined "<date> <time>" string once it parses cleanly.

    Raises:
        SystemExit: If the combined string is not a valid timestamp.
    """
    combined = " ".join((date, time))
    try:
        # strptime is used purely as a format check; the parsed value is discarded.
        datetime.strptime(combined, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        raise SystemExit(
            f"Invalid timestamp '{combined}' (expected YYYY-MM-DD HH:MM:SS)"
        )
    return combined
def parse_mosdepth_metric(summary_path: Path) -> str:
    """
    Pull the 4th column of the final non-empty line of a mosdepth summary.

    Args:
        summary_path (Path): Path to the mosdepth summary file.

    Returns:
        str: The value found in the 4th whitespace-separated column.

    Raises:
        ValueError: If the file contains no non-empty lines, or the last
            line has fewer than four columns.
    """
    with summary_path.open() as handle:
        rows = [row.strip() for row in handle if row.strip()]
    if not rows:
        raise ValueError("Mosdepth summary file is empty")
    last_line = rows[-1]
    fields = last_line.split()
    if len(fields) < 4:
        raise ValueError(
            f"Unexpected mosdepth format: {last_line}"
        )
    return fields[3]
def to_float(value, default=None):
    """
    Best-effort conversion of a value to float.

    Args:
        value: Anything float() might accept.
        default: Fallback returned when conversion is impossible.

    Returns:
        float or default: The converted number, or the fallback on failure.
    """
    try:
        result = float(value)
    except (TypeError, ValueError):
        return default
    return result
def construct_mosdepth_job(sample_name, job_start, job_stop, summary_path, metric):
    """
    Build the mosdepth job JSON object.

    Args:
        sample_name (str): Name of the sample.
        job_start (str): Job start time (YYYY-MM-DD HH:MM:SS).
        job_stop (str): Job stop time (YYYY-MM-DD HH:MM:SS).
        summary_path (Path): Path to the mosdepth summary file.
        metric (str): Extracted dedup-coverage value.

    Returns:
        dict: The constructed mosdepth job JSON object.
    """
    summary_path = Path(summary_path)
    val_float = to_float(metric)
    metric_flag = "PASS"
    # Coverage thresholds differ by sample suffix: "DN" samples need >= 30x,
    # "DT" samples need >= 80x (presumably normal vs tumour — TODO confirm).
    # Guard against val_float being None: to_float returns None for a
    # non-numeric metric, and `None < 30` raises TypeError on Python 3,
    # which previously crashed the whole script.
    if val_float is not None:
        if sample_name.endswith("DN") and val_float < 30:
            metric_flag = "FAILED"
        elif sample_name.endswith("DT") and val_float < 80:
            metric_flag = "FAILED"
    return {
        "job_name": f"mosdepth.{sample_name}",
        "job_start": job_start,
        "job_stop": job_stop,
        "job_status": "COMPLETED",
        "file": [
            {
                "location_uri": f"abacus://{summary_path}",
                "file_name": summary_path.name,
            }
        ],
        "metric": [
            {
                "metric_name": "dedup_coverage",
                "metric_value": metric,
                "metric_flag": metric_flag,
                "metric_deliverable": True
            }
        ],
    }
def main():
    """
    Add mosdepth job entries to a tagged JSON file.

    Workflow:
        1. Parse command-line arguments.
        2. Extract the coverage metric from the mosdepth summary file.
        3. Load the tagged JSON file.
        4. Construct the mosdepth job object.
        5. Traverse samples and readsets, appending the job object to every
           readset whose name starts with the sample name.
        6. Write the updated tagged JSON to a new '<stem>_mosdepth.json' file.

    Raises:
        FileNotFoundError: If the summary or tagged JSON file does not exist.
        SystemExit: If a --job-start/--job-stop timestamp is malformed.
    """
    parser = argparse.ArgumentParser(
        description="Add mosdepth job entries to a tagged JSON file",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Example usage:
  add_mosdepth_job.py \\
    --sample-name SAMPLE_NAME \\
    --job-start "YYYY-MM-DD HH:MM:SS" \\
    --job-stop "YYYY-MM-DD HH:MM:SS" \\
    --summary-txt /absolute/path/to/sample.mosdepth.summary.txt \\
    --tagged-json /absolute/path/to/tagged.json

Notes:
  - The mosdepth metric is extracted from the last line, 4th column
    of the summary file.
  - All readsets whose name starts with the provided sample name
    will receive the new mosdepth job.
  - Output is written next to the input JSON with a '_mosdepth.json' suffix.
"""
    )
    parser.add_argument("--sample-name", required=True)
    # nargs=2 accepts the date and time as two separate shell words; they are
    # re-joined and validated by join_and_validate below.
    parser.add_argument(
        "--job-start",
        required=True,
        nargs=2,
        metavar=("DATE", "TIME"),
        help="Job start time (YYYY-MM-DD HH:MM:SS)"
    )
    parser.add_argument(
        "--job-stop",
        required=True,
        nargs=2,
        metavar=("DATE", "TIME"),
        help="Job stop time (YYYY-MM-DD HH:MM:SS)"
    )
    parser.add_argument("--summary-txt", required=True)
    parser.add_argument("--tagged-json", required=True)
    args = parser.parse_args()

    summary_path = Path(args.summary_txt)
    tagged_json_path = Path(args.tagged_json)
    # Fail early with a clear error before doing any parsing work.
    if not summary_path.is_file():
        raise FileNotFoundError(summary_path)
    if not tagged_json_path.is_file():
        raise FileNotFoundError(tagged_json_path)

    # Parse metric (last line, 4th column of the mosdepth summary)
    metric_value = parse_mosdepth_metric(summary_path)

    # Load tagged JSON
    with tagged_json_path.open() as f:
        tagged = json.load(f)

    job_start = join_and_validate(*args.job_start)
    job_stop = join_and_validate(*args.job_stop)

    # Build job object template (the same dict is shared by reference across
    # all matching readsets — presumably fine since it is write-once; verify
    # if downstream code mutates individual job entries)
    mosdepth_job = construct_mosdepth_job(
        sample_name=args.sample_name,
        job_start=job_start,
        job_stop=job_stop,
        summary_path=summary_path,
        metric=metric_value,
    )

    # Traverse samples / readsets; prefix match attaches the job to every
    # readset belonging to this sample (e.g. SAMPLE.lane1, SAMPLE.lane2)
    for sample in tagged.get("sample", []):
        for readset in sample.get("readset", []):
            if readset.get("readset_name", "").startswith(args.sample_name):
                readset.setdefault("job", []).append(mosdepth_job)

    # Write output file next to the input, with a '_mosdepth' suffix on the stem
    output_path = tagged_json_path.with_name(
        tagged_json_path.stem + "_mosdepth.json"
    )
    with output_path.open("w") as f:
        json.dump(tagged, f, indent=4)
    print(f"Written: {output_path}")
# Script entry point: run main() only when executed directly, not on import.
if __name__ == "__main__":
    main()