import os
import tempfile
from glob import glob

from htc_utils import Job, Submit

# Fill in the paths if HTCondor is not already installed globally:
# os.environ['CONDOR_CONFIG'] = '/path/to/condor/etc/condor_config'
# os.environ['PATH'] = ':'.join(['/path/to/condor/bin', os.environ['PATH']])


def job_spawner(args):
    # Reserve a unique name for this batch of jobs; the temporary file is
    # deleted automatically when the handle is closed, so it exists only to
    # guarantee the name is not reused by a concurrent run.
    ifile = tempfile.NamedTemporaryFile(mode='w+b', prefix='generator2_',
                                        dir='.', delete=True)
    print("Spawning: {}".format(ifile.name))

    # Make sure we do not have a file extension
    shortname = os.path.basename(os.path.splitext(ifile.name)[0])

    # Begin job file generation
    j = Job(shortname)

    # Enable HTCondor logging (logs stored in "./logs")
    j.logging('logs', create=True)

    # Instruct HTCondor to run an executable with particular arguments,
    # and queue it once for each argument passed to the function
    for arg in args:
        j.subattr('executable', os.path.abspath('../worker/worker1.py'))
        j.subattr('arguments', '--output-dir {} {}'.format(RESULTS, arg))
        j.subattr('queue')

    # Save the fresh job file
    j.commit()

    # Submit the job file to the cluster
    sub = Submit(j)
    sub.execute()


if __name__ == '__main__':
    # Where to save output (module-level, so job_spawner can reference it)
    RESULTS = os.path.abspath('../results/sample1')

    # Create a list of paths to our sample data (created with mkdata.sh)
    DATA = [os.path.abspath(g) for g in glob('../data/sample1/*.dat')]

    job_spawner(DATA)
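
# For reference, a sketch of the submit description this script should
# produce. This assumes htc_utils translates the subattr() calls above into
# standard HTCondor submit syntax, one executable/arguments/queue stanza per
# data file (the exact output file name and extension are chosen by
# htc_utils); the paths shown are purely illustrative:
#
#   executable = /abs/path/to/worker/worker1.py
#   arguments  = --output-dir /abs/path/to/results/sample1 /abs/path/to/data/sample1/<file>.dat
#   queue
#   executable = /abs/path/to/worker/worker1.py
#   arguments  = --output-dir /abs/path/to/results/sample1 /abs/path/to/data/sample1/<next file>.dat
#   queue
#   ...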