Remove memory limitations. Add core usage limitation.

This commit is contained in:
Patrick Jentsch 2019-03-06 15:54:19 +01:00
parent 97cc15db54
commit f52fd385da

6
nlp
View File

@@ -35,7 +35,7 @@ def parse_arguments():
help="Output directory.",
required=True)
parser.add_argument("--nCores",
-                        default=multiprocessing.cpu_count(),
+                        default=max(4, multiprocessing.cpu_count()),
dest="nCores",
help="Total number of cores available.",
required=False,
@@ -78,7 +78,7 @@ class NLPWorkflow(WorkflowRunner):
os.path.join(job["output_dir"], os.path.basename(job["path"]).rsplit(".", 1)[0] + ".vrt"),
self.lang
)
-            nlp_jobs.append(self.addTask(label="nlp_job_-_%i" % (nlp_job_number), command=cmd, dependencies=mkdir_jobs, nCores=min(4, self.nCores), memMb=15000))
+            nlp_jobs.append(self.addTask(label="nlp_job_-_%i" % (nlp_job_number), command=cmd, dependencies=mkdir_jobs, nCores=min(4, self.nCores)))
def analyze_jobs(inputDir, outputDir, level=1):
@@ -109,7 +109,7 @@ def main():
args.nCores
)
-    retval = wflow.run(nCores=args.nCores, memMb=30000)
+    retval = wflow.run(nCores=args.nCores)
sys.exit(retval)