Skip to content

Commit dc72ada

Browse files
author
hlasimpk
committed
Merge branch 'master' of https://github.com/rigdenlab/ample
2 parents 1a437be + 3d94390 commit dc72ada

15 files changed

Lines changed: 264 additions & 225 deletions

File tree

ample/ensembler/__init__.py

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,8 @@
3636
from ample.util import pdb_edit
3737
from ample.util import printTable
3838

39+
from pyjob import Script
40+
3941
logger = logging.getLogger(__name__)
4042

4143

@@ -57,15 +59,12 @@ def cluster_script(amoptd, python_path="ccp4-python"):
5759
"""
5860
# write out script
5961
work_dir = amoptd['work_dir']
60-
script_path = os.path.join(work_dir, "submit_ensemble.sh")
61-
with open(script_path, "w") as job_script:
62-
job_script.write(ample_util.SCRIPT_HEADER + os.linesep)
63-
job_script.write("export CCP4_SCR=${TMPDIR}" + os.linesep) # Added by Ronan after issues on CCP4online server
64-
job_script.write("ccp4-python -m ample.ensembler -restart_pkl {0}".format(amoptd['results_path']) + os.linesep)
65-
66-
# Make executable
67-
os.chmod(script_path, 0o777)
68-
return script_path
62+
script = Script(directory=work_dir, stem="submit_ensemble")
63+
script.append("export CCP4_SCR=${TMPDIR}")
64+
script.append("ccp4-python -m ample.ensembler -restart_pkl {0}".format(amoptd['results_path']))
65+
script.write()
66+
67+
return script
6968

7069

7170
def create_ensembles(amoptd):

ample/main.py

Lines changed: 66 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
import shutil
99
import sys
1010
import time
11+
import warnings
1112

1213
from ample import ensembler
1314
from ample.ensembler.constants import UNMODIFIED
@@ -24,9 +25,10 @@
2425
from ample.util import process_models
2526
from ample.util import pyrvapi_results
2627
from ample.util import reference_manager
27-
from ample.util import workers_util
2828
from ample.util import version
2929

30+
from pyjob.factory import TaskFactory
31+
3032
__author__ = "Jens Thomas, Felix Simkovic, Adam Simpkin, Ronan Keegan, and Jaclyn Bibby"
3133
__credits__ = "Daniel Rigden, Martyn Winn, and Olga Mayans"
3234
__email__ = "drigden@liverpool.ac.uk"
@@ -79,6 +81,17 @@ def monitor():
7981
else:
8082
monitor = None
8183

84+
# Highlight deprecated command line arguments
85+
if amopt.d['submit_cluster']:
86+
message = "-%s has been deprecated and will be removed in version %s!" % ('submit_cluster', 1.6)
87+
warnings.warn(message, DeprecationWarning)
88+
if amopt.d["submit_pe_lsf"]:
89+
message = "-%s has been deprecated and will be removed in version %s! Use -submit_pe instead" % ('submit_pe_lsf', 1.6)
90+
warnings.warn(message, DeprecationWarning)
91+
if amopt.d["submit_pe_sge"]:
92+
message = "-%s has been deprecated and will be removed in version %s! Use -submit_pe instead" % ('submit_pe_sge', 1.6)
93+
warnings.warn(message, DeprecationWarning)
94+
8295
# Process any files we may have been given
8396
model_results = process_models.extract_and_validate_models(amopt.d)
8497
if model_results:
@@ -147,24 +160,25 @@ def monitor():
147160
return
148161

149162
def benchmarking(self, optd):
150-
if optd['submit_cluster']:
163+
if optd['submit_qtype'] != 'local':
151164
# Pickle dictionary so it can be opened by the job to get the parameters
152165
ample_util.save_amoptd(optd)
153166
script = benchmark_util.cluster_script(optd)
154-
workers_util.run_scripts(
155-
job_scripts=[script],
156-
monitor=monitor,
157-
nproc=optd['nproc'],
158-
job_time=43200,
159-
job_name='benchmark',
160-
submit_cluster=optd['submit_cluster'],
161-
submit_qtype=optd['submit_qtype'],
162-
submit_queue=optd['submit_queue'],
163-
submit_pe_lsf=optd['submit_pe_lsf'],
164-
submit_pe_sge=optd['submit_pe_sge'],
165-
submit_array=optd['submit_array'],
166-
submit_max_array=optd['submit_max_array'],
167-
)
167+
with TaskFactory(
168+
optd['submit_qtype'],
169+
script,
170+
cwd=optd['work_dir'],
171+
environment=optd['submit_pe'],
172+
run_time=43200,
173+
name='benchmark',
174+
nprocesses=optd['nproc'],
175+
max_array_size=optd['submit_max_array'],
176+
queue=optd['submit_queue'],
177+
shell="/bin/bash",
178+
) as task:
179+
task.run()
180+
task.wait(interval=5, monitor_f=monitor)
181+
168182
# queue finished so unpickle results
169183
optd.update(ample_util.read_amoptd(optd['results_path']))
170184
else:
@@ -255,25 +269,25 @@ def ensembling(self, optd):
255269
msg = "ERROR! Cannot find any pdb files in: {0}".format(optd['models_dir'])
256270
exit_util.exit_error(msg)
257271
optd['ensemble_ok'] = os.path.join(optd['work_dir'], 'ensemble.ok')
258-
if optd['submit_cluster']:
272+
if optd['submit_qtype'] != 'local':
259273
# Pickle dictionary so it can be opened by the job to get the parameters
260274
ample_util.save_amoptd(optd)
261275
script = ensembler.cluster_script(optd)
262276
ensembler_timeout = ensembler.get_ensembler_timeout(optd)
263-
workers_util.run_scripts(
264-
job_scripts=[script],
265-
monitor=monitor,
266-
nproc=optd['nproc'],
267-
job_time=ensembler_timeout,
268-
job_name='ensemble',
269-
submit_cluster=optd['submit_cluster'],
270-
submit_qtype=optd['submit_qtype'],
271-
submit_queue=optd['submit_queue'],
272-
submit_pe_lsf=optd['submit_pe_lsf'],
273-
submit_pe_sge=optd['submit_pe_sge'],
274-
submit_array=optd['submit_array'],
275-
submit_max_array=optd['submit_max_array'],
276-
)
277+
with TaskFactory(
278+
optd['submit_qtype'],
279+
script,
280+
cwd=optd['work_dir'],
281+
environment=optd['submit_pe'],
282+
run_time=ensembler_timeout,
283+
name='ensemble',
284+
nprocesses=optd['nproc'],
285+
max_array_size=optd['submit_max_array'],
286+
queue=optd['submit_queue'],
287+
shell="/bin/bash",
288+
) as task:
289+
task.run()
290+
task.wait(interval=5, monitor_f=monitor)
277291
# queue finished so unpickle results
278292
optd.update(ample_util.read_amoptd(optd['results_path']))
279293
else:
@@ -402,24 +416,27 @@ def monitor():
402416

403417
# Change to mrbump directory before running
404418
os.chdir(optd['mrbump_dir'])
405-
ok = workers_util.run_scripts(
406-
job_scripts=optd['mrbump_scripts'],
407-
monitor=monitor,
408-
check_success=mrbump_util.checkSuccess,
409-
early_terminate=optd['early_terminate'],
410-
nproc=optd['nproc'],
411-
job_time=mrbump_util.MRBUMP_RUNTIME,
412-
job_name='mrbump',
413-
submit_cluster=optd['submit_cluster'],
414-
submit_qtype=optd['submit_qtype'],
415-
submit_queue=optd['submit_queue'],
416-
submit_pe_lsf=optd['submit_pe_lsf'],
417-
submit_pe_sge=optd['submit_pe_sge'],
418-
submit_array=optd['submit_array'],
419-
submit_max_array=optd['submit_max_array'],
420-
)
421-
422-
if not ok:
419+
420+
with TaskFactory(
421+
optd['submit_qtype'],
422+
optd['mrbump_scripts'],
423+
cwd=optd['mrbump_dir'],
424+
environment=optd['submit_pe'],
425+
run_time=mrbump_util.MRBUMP_RUNTIME,
426+
name="mrbump",
427+
nprocesses=optd['nproc'],
428+
max_array_size=optd['submit_max_array'],
429+
queue=optd['submit_queue'],
430+
shell="/bin/bash",
431+
) as task:
432+
task.run()
433+
434+
if optd['early_terminate']:
435+
task.wait(interval=5, monitor_f=monitor, success_f=mrbump_util.checkSuccess)
436+
else:
437+
task.wait(interval=5, monitor_f=monitor)
438+
439+
if not task.completed:
423440
msg = (
424441
"An error code was returned after running MRBUMP on the ensembles!\n"
425442
+ "For further information check the logs in directory: {0}".format(optd['mrbump_dir'])

ample/modelling/__main__.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ def process_args(args):
1919
if args.rosetta_flagsfile:
2020
args.rosetta_flagsfile = os.path.abspath(args.rosetta_flagsfile)
2121
if args.nproc is None:
22-
if args.submit_cluster:
22+
if args.submit_qtype != 'local':
2323
args.nproc = 1
2424
else:
2525
args.nproc = multiprocessing.cpu_count()
@@ -37,7 +37,7 @@ def process_args(args):
3737
argparse_util.add_cluster_submit_options(parser)
3838

3939
work_dir = os.path.abspath('rosetta_modelling')
40-
parser.set_defaults(submit_cluster=False, submit_qtype='SGE', submit_array=True, nmodels=1000, work_dir=work_dir)
40+
parser.set_defaults(submit_qtype='SGE', submit_array=True, nmodels=1000, work_dir=work_dir)
4141
args = parser.parse_args()
4242
process_args(args)
4343

@@ -66,7 +66,7 @@ def process_args(args):
6666
rm.nchains = args.nchains
6767

6868
rm.nproc = args.nproc
69-
rm.submit_cluster = args.submit_cluster
69+
rm.submit_cluster = args.submit_qtype != "local"
7070
rm.submit_qtype = args.submit_qtype
7171
rm.submit_queue = args.submit_queue
7272
rm.submit_array = args.submit_array

0 commit comments

Comments
 (0)