
Commit b4d320b

Jon Palmer authored and committed
try to add plantismash, but it is not working, so leave the menu at its default and only support fungi right now
1 parent e380f0e commit b4d320b
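
For context, the new -a/--antismash option picks which antiSMASH web server the remote job is sent to. A hypothetical invocation might look like the line below; the -e/--email, -a/--antismash, and -o/--out flags are confirmed by the diff, while the -m/--methods flag is assumed from the script's use of args.methods:

funannotate-remote.py -m antismash -e you@example.org -a fungi -o mygenome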

File tree: 1 file changed (+14, -7 lines)

bin/funannotate-remote.py

Lines changed: 14 additions & 7 deletions
@@ -24,6 +24,7 @@ def __init__(self,prog):
 parser.add_argument('-o', '--out', help='Basename of output files')
 parser.add_argument('-e', '--email', required=True, help='Email address for IPRSCAN server')
 parser.add_argument('--force', action='store_true', help='Over-write output folder')
+parser.add_argument('-a', '--antismash', default='fungi', choices=['fungi','plants'], help='antiSMASH server')
 args=parser.parse_args()
 
 def runIPRpython(Input):
@@ -237,10 +238,16 @@ def download(url, name):
     shutil.rmtree(IPROUT)
 
 if 'antismash' in args.methods or 'all' in args.methods:
-    version = requests.get("https://fungismash.secondarymetabolites.org/api/v1.0/version")
+    if args.antismash == 'fungi':
+        base_address = "https://fungismash.secondarymetabolites.org"
+        job_parameters = {'email': args.email, 'smcogs': 'on', 'knownclusterblast': 'on', 'activesitefinder': 'on', 'subclusterblast': 'on'}
+    elif args.antismash == 'plants':
+        base_address = "https://plantismash.secondarymetabolites.org"
+        job_parameters = {'email': args.email, 'knownclusterblast': 'on', 'subclusterblast': 'on'}
+    version = requests.get(base_address+"/api/v1.0/version")
     as_vers = version.json()['antismash_generation']
     tax = version.json()['taxon']
-    as_status = requests.get("https://fungismash.secondarymetabolites.org/api/v1.0/stats")
+    as_status = requests.get(base_address+"/api/v1.0/stats")
     queue = as_status.json()['queue_length']
     running = as_status.json()['running']
     lib.log.info("Connecting to antiSMASH %s v%s webserver" % (tax, as_vers))
@@ -250,14 +257,14 @@ def download(url, name):
         lib.log.error("There are more than 10 antiSMASH jobs in queue, use --force to submit anyway")
         sys.exit(1)
     job_files = {'seq': open(genbank, 'rb')}
-    job_parameters = {'email': args.email, 'smcogs': 'on', 'knownclusterblast': 'on', 'activesitefinder': 'on', 'subclusterblast': 'on'}
+
     lib.log.info("Uploading %s to webserver" % genbank)
-    postjob = requests.post("https://fungismash.secondarymetabolites.org/api/v1.0/submit", files=job_files, data=job_parameters)
+    postjob = requests.post(base_address+"/api/v1.0/submit", files=job_files, data=job_parameters)
     jobid = postjob.json()['id']
     #now we can query the job every so often, not sure what is reasonable here, start with 2 minutes?
     lib.log.info("Waiting for results from job: %s" % jobid)
     while True:
-        job_status = requests.get("https://fungismash.secondarymetabolites.org/api/v1.0/status/"+jobid)
+        job_status = requests.get(base_address+"/api/v1.0/status/"+jobid)
         if job_status.json()['status'] == 'done':
             break
         time.sleep(120) #check every 2 minutes
@@ -267,15 +274,15 @@ def download(url, name):
     lib.log.debug("%s" % job_status.json())
     #need to retrieve results, have to find link, seems like this might be first scaffold name?
     #after asking Kai Blin - there is no "easy" way to identify the output name, however, think I can grab the html file and parse it
-    job_html = requests.get("https://fungismash.secondarymetabolites.org"+result_url)
+    job_html = requests.get(base_address+result_url)
     for line in job_html.iter_lines():
         if 'Download GenBank summary file' in line:
             cols = line.split('a href="')
             for x in cols:
                 if '.zip' in x:
                     link = x.split('"')[0]
                     baselink = link.replace('.zip', '')
-                    download_url = "https://fungismash.secondarymetabolites.org"+base_url+link
+                    download_url = base_address+base_url+link
                     download(download_url, 'antiSMASH.zip')
     #now unzip and move folder
     zipref = zipfile.ZipFile('antiSMASH.zip', 'r')
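
Taken together, the changed lines reduce to the flow sketched below: pick a server from the --antismash choice, check the server version, submit the GenBank file, and poll the job status every two minutes. This is a minimal standalone sketch using the requests library, not the script itself; the endpoints and form fields are the ones that appear in the diff, the helper name submit_antismash is illustrative, and (per the commit message) the plantismash branch is untested and not working yet.

import time
import requests

def submit_antismash(genbank, email, taxon='fungi'):
    # Illustrative helper mirroring the diff above (not part of funannotate-remote.py).
    # Choose the server and job parameters the same way the new -a/--antismash option does.
    if taxon == 'fungi':
        base_address = "https://fungismash.secondarymetabolites.org"
        job_parameters = {'email': email, 'smcogs': 'on', 'knownclusterblast': 'on',
                          'activesitefinder': 'on', 'subclusterblast': 'on'}
    else:  # 'plants' -- untested, per the commit message
        base_address = "https://plantismash.secondarymetabolites.org"
        job_parameters = {'email': email, 'knownclusterblast': 'on', 'subclusterblast': 'on'}

    # Report which server generation we are talking to
    version = requests.get(base_address + "/api/v1.0/version").json()
    print("antiSMASH %s v%s" % (version['taxon'], version['antismash_generation']))

    # Upload the GenBank file, then poll the job every 2 minutes until it finishes
    with open(genbank, 'rb') as fh:
        postjob = requests.post(base_address + "/api/v1.0/submit",
                                files={'seq': fh}, data=job_parameters)
    jobid = postjob.json()['id']
    while True:
        job_status = requests.get(base_address + "/api/v1.0/status/" + jobid).json()
        if job_status['status'] == 'done':
            return job_status
        time.sleep(120)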
