run_search.py · 128 lines (109 loc) · 3.55 KB
#!/usr/bin/env python
# coding: utf-8
import argparse
import glob
import logging
import os

from rfpipe.pipeline import pipeline_sdm

logger = logging.getLogger()
def process(sdm, gainpath, preffile, devicenum, outdir):
    """
    Run rfpipe on an SDM and save unclustered results.

    :param sdm: path of the SDM file
    :param gainpath: directory to search for the gainfile
    :param preffile: rfpipe preference file
    :param devicenum: GPU number to use
    :param outdir: output directory
    """
    # Strip any trailing slash and recover the dataset ID from the SDM name.
    sdmname = sdm
    if sdmname[-1] == "/":
        sdmname = sdmname[:-1]
    if os.path.basename(sdmname).split("_")[0] == "realfast":
        datasetId = "_".join(os.path.basename(sdmname).split("_")[1:-1])
    else:
        datasetId = os.path.basename(sdmname)
    datasetId = ".".join(
        [x for x in datasetId.split(".") if "scan" not in x and "cut" not in x]
    )
    datadir = os.path.join(outdir, os.path.basename(sdmname))
    try:
        os.mkdir(datadir)
    except FileExistsError:
        logging.info("Directory {0} exists, using it.".format(datadir))
    except OSError:
        logging.info("Can't create directory {0}".format(datadir))
    else:
        logging.info("Created directory {0}".format(datadir))
    gainname = datasetId + ".GN"
    logging.info("Searching for the gainfile {0} in {1}".format(gainname, gainpath))
    # Walk gainpath and stop at the first file whose name matches the gainfile.
    gainfile = ""
    for path, dirs, files in os.walk(gainpath):
        for f in filter(lambda x: gainname in x, files):
            gainfile = os.path.join(path, gainname)
            break
        if gainfile:
            break
    try:
        assert len(gainfile)
        logging.info("Found gainfile for {0} in {1}".format(datasetId, gainfile))
    except AssertionError as err:
        logging.error("No gainfile found for {0} in {1}".format(datasetId, gainpath))
        raise err
    # Preferences passed to rfpipe; flaglist lists the RFI flagging steps.
    prefs = {}
    prefs["workdir"] = datadir
    prefs["gainfile"] = gainfile
    prefs["savenoise"] = False
    prefs["devicenum"] = devicenum
    prefs["flaglist"] = [
        ("badchtslide", 4.0, 20),
        ("badchtslide", 4, 20),
        ("badspw", 4.0),
        ("blstd", 3.5, 0.008),
    ]
    pipeline_sdm(
        sdm, inprefs=prefs, intent="TARGET", preffile=preffile, devicenum=devicenum
    )
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Normal pipeline search on an SDM",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument("-f", "--file", help="Path of SDM", required=True, type=str)
    parser.add_argument(
        "-d", "--devicenum", help="GPU number", required=False, type=int, default=0
    )
    parser.add_argument(
        "-g",
        "--gainpath",
        help="Path of gain files",
        required=False,
        type=str,
        default="/hyrule/data/users/kshitij/hdbscan/gainfiles/",
    )
    parser.add_argument(
        "-p",
        "--preffile",
        help="Path of preffile",
        required=False,
        type=str,
        default="/hyrule/data/users/kshitij/hdbscan/scripts/final/realfast_nocluster.yml",
    )
    parser.add_argument(
        "-o",
        "--outdir",
        help="Output directory",
        required=False,
        type=str,
        default="/hyrule/data/users/kshitij/hdbscan/final_data/",
    )
    values = parser.parse_args()

    # The script logs at INFO level; configure the root logger so the messages
    # are emitted when run from the command line.
    logging.basicConfig(level=logging.INFO)
    logging.info("Input Arguments:")
    for arg, value in sorted(vars(values).items()):
        logging.info("Argument %s: %r", arg, value)

    # Resolve the SDM path; if --file is a glob pattern, take the first match.
    sdm = glob.glob(values.file)[0]
    process(
        sdm=sdm,
        gainpath=values.gainpath,
        preffile=values.preffile,
        devicenum=str(values.devicenum),
        outdir=values.outdir,
    )
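# A minimal invocation sketch (the SDM path below is hypothetical; the default
# gainpath, preffile, and outdir above are site-specific and assumed to exist):
#
#   python run_search.py -f /data/sdm/realfast_20A-000.sb12345.1.58000.123 -d 0
#
# The --file value is resolved with glob.glob, so a quoted wildcard pattern that
# matches a single SDM also works, e.g. -f "/data/sdm/realfast_*58000*".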