mirror of
https://github.com/YosysHQ/sby.git
synced 2025-08-16 09:55:30 +00:00
Add -j<N> parallel tasks argument
This allows up to N tasks to be run in parallel within a multiprocessing Pool. The default behaviour is still one task at a time.
This commit is contained in:
parent
15278f1346
commit
64c7da9940
1 changed file with 9 additions and 8 deletions
|
@ -17,7 +17,7 @@
|
||||||
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
#
|
#
|
||||||
|
|
||||||
import argparse, os, sys, shutil, tempfile, re
|
import argparse, os, sys, shutil, tempfile, re, multiprocessing, functools
|
||||||
##yosys-sys-path##
|
##yosys-sys-path##
|
||||||
from sby_core import SbyJob, SbyAbort, process_filename
|
from sby_core import SbyJob, SbyAbort, process_filename
|
||||||
from time import localtime
|
from time import localtime
|
||||||
|
@ -46,6 +46,8 @@ parser.add_argument("-T", metavar="<taskname>", action="append", dest="tasknames
|
||||||
help="add taskname (useful when sby file is read from stdin)")
|
help="add taskname (useful when sby file is read from stdin)")
|
||||||
parser.add_argument("-E", action="store_true", dest="throw_err",
|
parser.add_argument("-E", action="store_true", dest="throw_err",
|
||||||
help="throw an exception (incl stack trace) for most errors")
|
help="throw an exception (incl stack trace) for most errors")
|
||||||
|
parser.add_argument("-j", metavar="<N>", dest="num_jobs", type=int, default=1,
|
||||||
|
help="run up to <N> tasks in parallel")
|
||||||
|
|
||||||
parser.add_argument("--yosys", metavar="<path_to_executable>",
|
parser.add_argument("--yosys", metavar="<path_to_executable>",
|
||||||
action=DictAction, dest="exe_paths")
|
action=DictAction, dest="exe_paths")
|
||||||
|
@ -94,6 +96,7 @@ opt_backup = args.backup
|
||||||
opt_tmpdir = args.tmpdir
|
opt_tmpdir = args.tmpdir
|
||||||
exe_paths = args.exe_paths
|
exe_paths = args.exe_paths
|
||||||
throw_err = args.throw_err
|
throw_err = args.throw_err
|
||||||
|
num_jobs = args.num_jobs
|
||||||
dump_cfg = args.dump_cfg
|
dump_cfg = args.dump_cfg
|
||||||
dump_tasks = args.dump_tasks
|
dump_tasks = args.dump_tasks
|
||||||
dump_files = args.dump_files
|
dump_files = args.dump_files
|
||||||
|
@ -435,14 +438,12 @@ def run_job(taskname):
|
||||||
|
|
||||||
return job.retcode
|
return job.retcode
|
||||||
|
|
||||||
|
task_retcodes = []
|
||||||
|
with multiprocessing.Pool(num_jobs) as p:
|
||||||
|
task_retcodes = p.map(run_job, tasknames)
|
||||||
|
|
||||||
failed = []
|
retcode = functools.reduce(lambda a, b: a | b, task_retcodes)
|
||||||
retcode = 0
|
failed = [tasknames[idx] for (idx, rc) in enumerate(task_retcodes) if rc]
|
||||||
for task in tasknames:
|
|
||||||
task_retcode = run_job(task)
|
|
||||||
retcode |= task_retcode
|
|
||||||
if task_retcode:
|
|
||||||
failed.append(task)
|
|
||||||
|
|
||||||
if failed and (len(tasknames) > 1 or tasknames[0] is not None):
|
if failed and (len(tasknames) > 1 or tasknames[0] is not None):
|
||||||
tm = localtime()
|
tm = localtime()
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue