blob: f18a7464ddab2e65d1c004629a72af0e0a579a53 (
plain) (
blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
|
#!/usr/bin/env python
# Driver script: submits one HTCondor job per input .dat file, fanned out
# over a multiprocessing pool (see job_spawner below).
import os
from htc_utils import Job, Submit
from multiprocessing import Pool
from glob import glob
from pprint import pprint
# NOTE(review): hard-coded, user-specific paths to a local Condor 8.2.6
# install — this script only works on the original author's machine as
# written; consider reading these locations from the environment instead.
os.environ['CONDOR_CONFIG'] = '/Users/jhunk/Downloads/condor-8.2.6-x86_64_MacOSX7-stripped/etc/condor_config'
os.environ['PATH'] = '/Users/jhunk/Downloads/condor-8.2.6-x86_64_MacOSX7-stripped/bin:/Users/jhunk/Downloads/condor-8.2.6-x86_64_MacOSX7-stripped/sbin:/usr/local/lib/wxPython/bin:/Users/jhunk/.virtualenvs/python27/bin:/opt/local/libexec/gnubin:/opt/local/bin:/opt/local/sbin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/X11/bin:/usr/texbin'
def job_spawner(ifile, results_dir=None):
    """Build and submit one HTCondor job for a single input data file.

    The job is named after *ifile*'s basename (extension stripped), logs to
    ``logs``, runs ``../worker/worker1.py`` and passes it
    ``--output-dir <results_dir> <ifile>``.

    :param ifile: path to one input ``.dat`` file.
    :param results_dir: output directory handed to the worker. Defaults to
        the module-level ``RESULTS`` when it exists, otherwise falls back to
        ``../results/sample1``.
    """
    if results_dir is None:
        # RESULTS is assigned only inside the __main__ guard. Under the
        # 'spawn' multiprocessing start method (macOS default on modern
        # Pythons) pool children re-import this module WITHOUT running that
        # guard, so reading the bare global would raise NameError here.
        results_dir = globals().get('RESULTS', os.path.abspath('../results/sample1'))
    print("Spawning: {}".format(ifile))
    job = Job(os.path.basename(os.path.splitext(ifile)[0]))
    job.logging('logs')
    job.attr('executable', os.path.abspath('../worker/worker1.py'))
    job.attr('arguments', '--output-dir {} {}'.format(results_dir, ifile))
    job.attr('queue')
    job.commit()
    Submit(job).execute()
if __name__ == '__main__':
    # Absolute paths so the submit description stays valid no matter what
    # working directory the spawned jobs end up in.
    RESULTS = os.path.abspath('../results/sample1')
    DATA = [os.path.abspath(g) for g in glob('../data/sample1/*.dat')]

    # Fan the per-file submissions out over 8 processes. The original code
    # never closed the pool; close()/join() ensures every child is finished
    # and reaped before the script exits.
    pool = Pool(8)
    try:
        pool.map(job_spawner, DATA)
    finally:
        pool.close()
        pool.join()
|