mirror of
https://github.com/Z3Prover/z3
synced 2025-04-22 16:45:31 +00:00
Merge branch 'master' of https://github.com/Z3Prover/z3 into new-ml-api
This commit is contained in:
commit
b178420797
174 changed files with 9762 additions and 2502 deletions
|
@ -4,44 +4,48 @@ Reads a list of Z3 API header files and
|
|||
generate the constant declarations need
|
||||
by one or more Z3 language bindings
|
||||
"""
|
||||
import mk_util
|
||||
import mk_genfile_common
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
def check_dir(output_dir):
|
||||
if not os.path.isdir(output_dir):
|
||||
logging.error('"{}" is not an existing directory'.format(output_dir))
|
||||
return 1
|
||||
return 0
|
||||
|
||||
def main(args):
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("api_files", nargs="+")
|
||||
parser.add_argument("--z3py-output-dir", dest="z3py_output_dir", default=None)
|
||||
pargs = parser.parse_args(args)
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("api_files", nargs="+")
|
||||
parser.add_argument("--z3py-output-dir", dest="z3py_output_dir", default=None)
|
||||
parser.add_argument("--dotnet-output-dir", dest="dotnet_output_dir", default=None)
|
||||
pargs = parser.parse_args(args)
|
||||
|
||||
for api_file in pargs.api_files:
|
||||
if not os.path.exists(api_file):
|
||||
logging.error('"{}" does not exist'.format(api_file))
|
||||
return 1
|
||||
if not mk_genfile_common.check_files_exist(pargs.api_files):
|
||||
logging.error('One or more API files do not exist')
|
||||
return 1
|
||||
|
||||
count = 0
|
||||
count = 0
|
||||
|
||||
if pargs.z3py_output_dir:
|
||||
if check_dir(pargs.z3py_output_dir) != 0:
|
||||
return 1
|
||||
mk_util.mk_z3consts_py_internal(pargs.api_files, pargs.z3py_output_dir)
|
||||
count += 1
|
||||
if pargs.z3py_output_dir:
|
||||
if not mk_genfile_common.check_dir_exists(pargs.z3py_output_dir):
|
||||
return 1
|
||||
output = mk_genfile_common.mk_z3consts_py_internal(pargs.api_files, pargs.z3py_output_dir)
|
||||
logging.info('Generated "{}"'.format(output))
|
||||
count += 1
|
||||
|
||||
if count == 0:
|
||||
logging.info('No files generated. You need to specific an output directory'
|
||||
' for the relevant langauge bindings')
|
||||
# TODO: Add support for other bindings
|
||||
return 0
|
||||
if pargs.dotnet_output_dir:
|
||||
if not mk_genfile_common.check_dir_exists(pargs.dotnet_output_dir):
|
||||
return 1
|
||||
output = mk_genfile_common.mk_z3consts_dotnet_internal(
|
||||
pargs.api_files,
|
||||
pargs.dotnet_output_dir)
|
||||
logging.info('Generated "{}"'.format(output))
|
||||
count += 1
|
||||
|
||||
if count == 0:
|
||||
logging.info('No files generated. You need to specific an output directory'
|
||||
' for the relevant langauge bindings')
|
||||
# TODO: Add support for other bindings
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
|
|
@ -6,31 +6,31 @@ exported symbols of a dll. This file
|
|||
can be passed to the ``/DEF`` to the
|
||||
linker used by MSVC.
|
||||
"""
|
||||
import mk_util
|
||||
import mk_genfile_common
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
def main(args):
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("output_file", help="output def file path")
|
||||
parser.add_argument("dllname", help="dllname to use in def file")
|
||||
parser.add_argument("api_files", nargs="+")
|
||||
pargs = parser.parse_args(args)
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("output_file", help="output def file path")
|
||||
parser.add_argument("dllname", help="dllname to use in def file")
|
||||
parser.add_argument("api_files", nargs="+")
|
||||
pargs = parser.parse_args(args)
|
||||
|
||||
for api_file in pargs.api_files:
|
||||
if not os.path.exists(api_file):
|
||||
logging.error('"{}" does not exist'.format(api_file))
|
||||
return 1
|
||||
if not mk_genfile_common.check_files_exist(pargs.api_files):
|
||||
logging.error('One or more api files do not exist')
|
||||
return 1
|
||||
|
||||
mk_util.mk_def_file_internal(
|
||||
pargs.output_file,
|
||||
pargs.dllname,
|
||||
pargs.api_files)
|
||||
return 0
|
||||
mk_genfile_common.mk_def_file_internal(
|
||||
pargs.output_file,
|
||||
pargs.dllname,
|
||||
pargs.api_files)
|
||||
logging.info('Generated "{}"'.format(pargs.output_file))
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
||||
|
|
655
scripts/mk_genfile_common.py
Normal file
655
scripts/mk_genfile_common.py
Normal file
|
@ -0,0 +1,655 @@
|
|||
# This file contains code that is common to
|
||||
# both the Python build system and the CMake
|
||||
# build system.
|
||||
#
|
||||
# The code here generally is involved in
|
||||
# generating files needed by Z3 at build time.
|
||||
#
|
||||
# You should **not** import ``mk_util`` here
|
||||
# to avoid having this code depend on the
|
||||
# of the Python build system.
|
||||
import os
|
||||
import pprint
|
||||
import logging
|
||||
import re
|
||||
import sys
|
||||
|
||||
# Logger for this module
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
###############################################################################
|
||||
# Utility functions
|
||||
###############################################################################
|
||||
def check_dir_exists(output_dir):
|
||||
"""
|
||||
Returns ``True`` if ``output_dir`` exists, otherwise
|
||||
returns ``False``.
|
||||
"""
|
||||
if not os.path.isdir(output_dir):
|
||||
_logger.error('"{}" is not an existing directory'.format(output_dir))
|
||||
return False
|
||||
return True
|
||||
|
||||
def check_files_exist(files):
|
||||
assert isinstance(files, list)
|
||||
for f in files:
|
||||
if not os.path.exists(f):
|
||||
_logger.error('"{}" does not exist'.format(f))
|
||||
return False
|
||||
return True
|
||||
|
||||
def sorted_headers_by_component(l):
|
||||
"""
|
||||
Take a list of header files and sort them by the
|
||||
path after ``src/``. E.g. for ``src/ast/fpa/fpa2bv_converter.h`` the sorting
|
||||
key is ``ast/fpa/fpa2bv_converter.h``.
|
||||
|
||||
The sort is done this way because for the CMake build
|
||||
there are two directories for every component (e.g.
|
||||
``<src_dir>/src/ast/fpa`` and ``<build_dir>/src/ast/fpa``).
|
||||
We don't want to sort based on different ``<src_dir>``
|
||||
and ``<build_dir>`` prefixes so that we can match the Python build
|
||||
system's behaviour.
|
||||
"""
|
||||
assert isinstance(l, list)
|
||||
def get_key(path):
|
||||
_logger.debug("get_key({})".format(path))
|
||||
path_components = []
|
||||
stripped_path = path
|
||||
assert 'src' in stripped_path.split(os.path.sep) or 'src' in stripped_path.split('/')
|
||||
# Keep stripping off directory components until we hit ``src``
|
||||
while os.path.basename(stripped_path) != 'src':
|
||||
path_components.append(os.path.basename(stripped_path))
|
||||
stripped_path = os.path.dirname(stripped_path)
|
||||
assert len(path_components) > 0
|
||||
path_components.reverse()
|
||||
# For consistency across platforms use ``/`` rather than ``os.sep``.
|
||||
# This is a sorting key so it doesn't need to a platform suitable
|
||||
# path
|
||||
r = '/'.join(path_components)
|
||||
_logger.debug("return key:'{}'".format(r))
|
||||
return r
|
||||
sorted_headers = sorted(l, key=get_key)
|
||||
_logger.debug('sorted headers:{}'.format(pprint.pformat(sorted_headers)))
|
||||
return sorted_headers
|
||||
|
||||
|
||||
|
||||
###############################################################################
|
||||
# Functions for generating constant declarations for language bindings
|
||||
###############################################################################
|
||||
|
||||
def mk_z3consts_py_internal(api_files, output_dir):
|
||||
"""
|
||||
Generate ``z3consts.py`` from the list of API header files
|
||||
in ``api_files`` and write the output file into
|
||||
the ``output_dir`` directory
|
||||
|
||||
Returns the path to the generated file.
|
||||
"""
|
||||
assert os.path.isdir(output_dir)
|
||||
assert isinstance(api_files, list)
|
||||
|
||||
blank_pat = re.compile("^ *\r?$")
|
||||
comment_pat = re.compile("^ *//.*$")
|
||||
typedef_pat = re.compile("typedef enum *")
|
||||
typedef2_pat = re.compile("typedef enum { *")
|
||||
openbrace_pat = re.compile("{ *")
|
||||
closebrace_pat = re.compile("}.*;")
|
||||
|
||||
z3consts = open(os.path.join(output_dir, 'z3consts.py'), 'w')
|
||||
z3consts_output_path = z3consts.name
|
||||
z3consts.write('# Automatically generated file\n\n')
|
||||
for api_file in api_files:
|
||||
api = open(api_file, 'r')
|
||||
|
||||
SEARCHING = 0
|
||||
FOUND_ENUM = 1
|
||||
IN_ENUM = 2
|
||||
|
||||
mode = SEARCHING
|
||||
decls = {}
|
||||
idx = 0
|
||||
|
||||
linenum = 1
|
||||
for line in api:
|
||||
m1 = blank_pat.match(line)
|
||||
m2 = comment_pat.match(line)
|
||||
if m1 or m2:
|
||||
# skip blank lines and comments
|
||||
linenum = linenum + 1
|
||||
elif mode == SEARCHING:
|
||||
m = typedef_pat.match(line)
|
||||
if m:
|
||||
mode = FOUND_ENUM
|
||||
m = typedef2_pat.match(line)
|
||||
if m:
|
||||
mode = IN_ENUM
|
||||
decls = {}
|
||||
idx = 0
|
||||
elif mode == FOUND_ENUM:
|
||||
m = openbrace_pat.match(line)
|
||||
if m:
|
||||
mode = IN_ENUM
|
||||
decls = {}
|
||||
idx = 0
|
||||
else:
|
||||
assert False, "Invalid %s, line: %s" % (api_file, linenum)
|
||||
else:
|
||||
assert mode == IN_ENUM
|
||||
words = re.split('[^\-a-zA-Z0-9_]+', line)
|
||||
m = closebrace_pat.match(line)
|
||||
if m:
|
||||
name = words[1]
|
||||
z3consts.write('# enum %s\n' % name)
|
||||
# Iterate over key-value pairs ordered by value
|
||||
for k, v in sorted(decls.items(), key=lambda pair: pair[1]):
|
||||
z3consts.write('%s = %s\n' % (k, v))
|
||||
z3consts.write('\n')
|
||||
mode = SEARCHING
|
||||
elif len(words) <= 2:
|
||||
assert False, "Invalid %s, line: %s" % (api_file, linenum)
|
||||
else:
|
||||
if words[2] != '':
|
||||
if len(words[2]) > 1 and words[2][1] == 'x':
|
||||
idx = int(words[2], 16)
|
||||
else:
|
||||
idx = int(words[2])
|
||||
decls[words[1]] = idx
|
||||
idx = idx + 1
|
||||
linenum = linenum + 1
|
||||
api.close()
|
||||
z3consts.close()
|
||||
return z3consts_output_path
|
||||
|
||||
def mk_z3consts_dotnet_internal(api_files, output_dir):
|
||||
"""
|
||||
Generate ``Enumerations.cs`` from the list of API header files
|
||||
in ``api_files`` and write the output file into
|
||||
the ``output_dir`` directory
|
||||
|
||||
Returns the path to the generated file.
|
||||
"""
|
||||
assert os.path.isdir(output_dir)
|
||||
assert isinstance(api_files, list)
|
||||
blank_pat = re.compile("^ *\r?$")
|
||||
comment_pat = re.compile("^ *//.*$")
|
||||
typedef_pat = re.compile("typedef enum *")
|
||||
typedef2_pat = re.compile("typedef enum { *")
|
||||
openbrace_pat = re.compile("{ *")
|
||||
closebrace_pat = re.compile("}.*;")
|
||||
|
||||
DeprecatedEnums = [ 'Z3_search_failure' ]
|
||||
z3consts = open(os.path.join(output_dir, 'Enumerations.cs'), 'w')
|
||||
z3consts_output_path = z3consts.name
|
||||
z3consts.write('// Automatically generated file\n\n')
|
||||
z3consts.write('using System;\n\n'
|
||||
'#pragma warning disable 1591\n\n'
|
||||
'namespace Microsoft.Z3\n'
|
||||
'{\n')
|
||||
|
||||
for api_file in api_files:
|
||||
api = open(api_file, 'r')
|
||||
|
||||
SEARCHING = 0
|
||||
FOUND_ENUM = 1
|
||||
IN_ENUM = 2
|
||||
|
||||
mode = SEARCHING
|
||||
decls = {}
|
||||
idx = 0
|
||||
|
||||
linenum = 1
|
||||
for line in api:
|
||||
m1 = blank_pat.match(line)
|
||||
m2 = comment_pat.match(line)
|
||||
if m1 or m2:
|
||||
# skip blank lines and comments
|
||||
linenum = linenum + 1
|
||||
elif mode == SEARCHING:
|
||||
m = typedef_pat.match(line)
|
||||
if m:
|
||||
mode = FOUND_ENUM
|
||||
m = typedef2_pat.match(line)
|
||||
if m:
|
||||
mode = IN_ENUM
|
||||
decls = {}
|
||||
idx = 0
|
||||
elif mode == FOUND_ENUM:
|
||||
m = openbrace_pat.match(line)
|
||||
if m:
|
||||
mode = IN_ENUM
|
||||
decls = {}
|
||||
idx = 0
|
||||
else:
|
||||
assert False, "Invalid %s, line: %s" % (api_file, linenum)
|
||||
else:
|
||||
assert mode == IN_ENUM
|
||||
words = re.split('[^\-a-zA-Z0-9_]+', line)
|
||||
m = closebrace_pat.match(line)
|
||||
if m:
|
||||
name = words[1]
|
||||
if name not in DeprecatedEnums:
|
||||
z3consts.write(' /// <summary>%s</summary>\n' % name)
|
||||
z3consts.write(' public enum %s {\n' % name)
|
||||
z3consts.write
|
||||
# Iterate over key-value pairs ordered by value
|
||||
for k, v in sorted(decls.items(), key=lambda pair: pair[1]):
|
||||
z3consts.write(' %s = %s,\n' % (k, v))
|
||||
z3consts.write(' }\n\n')
|
||||
mode = SEARCHING
|
||||
elif len(words) <= 2:
|
||||
assert False, "Invalid %s, line: %s" % (api_file, linenum)
|
||||
else:
|
||||
if words[2] != '':
|
||||
if len(words[2]) > 1 and words[2][1] == 'x':
|
||||
idx = int(words[2], 16)
|
||||
else:
|
||||
idx = int(words[2])
|
||||
decls[words[1]] = idx
|
||||
idx = idx + 1
|
||||
linenum = linenum + 1
|
||||
api.close()
|
||||
z3consts.write('}\n');
|
||||
z3consts.close()
|
||||
return z3consts_output_path
|
||||
|
||||
###############################################################################
|
||||
# Functions for generating a "module definition file" for MSVC
|
||||
###############################################################################
|
||||
|
||||
def mk_def_file_internal(defname, dll_name, export_header_files):
|
||||
"""
|
||||
Writes to a module definition file to a file named ``defname``.
|
||||
|
||||
``dll_name`` is the name of the dll (without the ``.dll`` suffix).
|
||||
``export_header_file`` is a list of header files to scan for symbols
|
||||
to include in the module definition file.
|
||||
"""
|
||||
assert isinstance(export_header_files, list)
|
||||
pat1 = re.compile(".*Z3_API.*")
|
||||
fout = open(defname, 'w')
|
||||
fout.write('LIBRARY "%s"\nEXPORTS\n' % dll_name)
|
||||
num = 1
|
||||
for export_header_file in export_header_files:
|
||||
api = open(export_header_file, 'r')
|
||||
for line in api:
|
||||
m = pat1.match(line)
|
||||
if m:
|
||||
words = re.split('\W+', line)
|
||||
i = 0
|
||||
for w in words:
|
||||
if w == 'Z3_API':
|
||||
f = words[i+1]
|
||||
fout.write('\t%s @%s\n' % (f, num))
|
||||
i = i + 1
|
||||
num = num + 1
|
||||
api.close()
|
||||
fout.close()
|
||||
|
||||
###############################################################################
|
||||
# Functions for generating ``gparams_register_modules.cpp``
|
||||
###############################################################################
|
||||
def mk_gparams_register_modules_internal(component_src_dirs, path):
|
||||
"""
|
||||
Generate a ``gparams_register_modules.cpp`` file in the directory ``path``.
|
||||
Returns the path to the generated file.
|
||||
|
||||
This file implements the procedure
|
||||
|
||||
```
|
||||
void gparams_register_modules()
|
||||
```
|
||||
|
||||
This procedure is invoked by gparams::init()
|
||||
"""
|
||||
assert isinstance(component_src_dirs, list)
|
||||
assert check_dir_exists(path)
|
||||
cmds = []
|
||||
mod_cmds = []
|
||||
mod_descrs = []
|
||||
fullname = os.path.join(path, 'gparams_register_modules.cpp')
|
||||
fout = open(fullname, 'w')
|
||||
fout.write('// Automatically generated file.\n')
|
||||
fout.write('#include"gparams.h"\n')
|
||||
reg_pat = re.compile('[ \t]*REG_PARAMS\(\'([^\']*)\'\)')
|
||||
reg_mod_pat = re.compile('[ \t]*REG_MODULE_PARAMS\(\'([^\']*)\', *\'([^\']*)\'\)')
|
||||
reg_mod_descr_pat = re.compile('[ \t]*REG_MODULE_DESCRIPTION\(\'([^\']*)\', *\'([^\']*)\'\)')
|
||||
h_files_full_path = []
|
||||
for component_src_dir in component_src_dirs:
|
||||
h_files = filter(lambda f: f.endswith('.h') or f.endswith('.hpp'), os.listdir(component_src_dir))
|
||||
h_files = list(map(lambda p: os.path.join(component_src_dir, p), h_files))
|
||||
h_files_full_path.extend(h_files)
|
||||
for h_file in sorted_headers_by_component(h_files_full_path):
|
||||
added_include = False
|
||||
with open(h_file, 'r') as fin:
|
||||
for line in fin:
|
||||
m = reg_pat.match(line)
|
||||
if m:
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % os.path.basename(h_file))
|
||||
cmds.append((m.group(1)))
|
||||
m = reg_mod_pat.match(line)
|
||||
if m:
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % os.path.basename(h_file))
|
||||
mod_cmds.append((m.group(1), m.group(2)))
|
||||
m = reg_mod_descr_pat.match(line)
|
||||
if m:
|
||||
mod_descrs.append((m.group(1), m.group(2)))
|
||||
fout.write('void gparams_register_modules() {\n')
|
||||
for code in cmds:
|
||||
fout.write('{ param_descrs d; %s(d); gparams::register_global(d); }\n' % code)
|
||||
for (mod, code) in mod_cmds:
|
||||
fout.write('{ param_descrs * d = alloc(param_descrs); %s(*d); gparams::register_module("%s", d); }\n' % (code, mod))
|
||||
for (mod, descr) in mod_descrs:
|
||||
fout.write('gparams::register_module_descr("%s", "%s");\n' % (mod, descr))
|
||||
fout.write('}\n')
|
||||
fout.close()
|
||||
return fullname
|
||||
|
||||
###############################################################################
|
||||
# Functions/data structures for generating ``install_tactics.cpp``
|
||||
###############################################################################
|
||||
|
||||
def mk_install_tactic_cpp_internal(component_src_dirs, path):
|
||||
"""
|
||||
Generate a ``install_tactics.cpp`` file in the directory ``path``.
|
||||
Returns the path the generated file.
|
||||
|
||||
This file implements the procedure
|
||||
|
||||
```
|
||||
void install_tactics(tactic_manager & ctx)
|
||||
```
|
||||
|
||||
It installs all tactics found in the given component directories
|
||||
``component_src_dirs`` The procedure looks for ``ADD_TACTIC`` commands
|
||||
in the ``.h`` and ``.hpp`` files of these components.
|
||||
"""
|
||||
ADD_TACTIC_DATA = []
|
||||
ADD_PROBE_DATA = []
|
||||
def ADD_TACTIC(name, descr, cmd):
|
||||
ADD_TACTIC_DATA.append((name, descr, cmd))
|
||||
|
||||
def ADD_PROBE(name, descr, cmd):
|
||||
ADD_PROBE_DATA.append((name, descr, cmd))
|
||||
|
||||
eval_globals = {
|
||||
'ADD_TACTIC': ADD_TACTIC,
|
||||
'ADD_PROBE': ADD_PROBE,
|
||||
}
|
||||
|
||||
assert isinstance(component_src_dirs, list)
|
||||
assert check_dir_exists(path)
|
||||
fullname = os.path.join(path, 'install_tactic.cpp')
|
||||
fout = open(fullname, 'w')
|
||||
fout.write('// Automatically generated file.\n')
|
||||
fout.write('#include"tactic.h"\n')
|
||||
fout.write('#include"tactic_cmds.h"\n')
|
||||
fout.write('#include"cmd_context.h"\n')
|
||||
tactic_pat = re.compile('[ \t]*ADD_TACTIC\(.*\)')
|
||||
probe_pat = re.compile('[ \t]*ADD_PROBE\(.*\)')
|
||||
h_files_full_path = []
|
||||
for component_src_dir in sorted(component_src_dirs):
|
||||
h_files = filter(lambda f: f.endswith('.h') or f.endswith('.hpp'), os.listdir(component_src_dir))
|
||||
h_files = list(map(lambda p: os.path.join(component_src_dir, p), h_files))
|
||||
h_files_full_path.extend(h_files)
|
||||
for h_file in sorted_headers_by_component(h_files_full_path):
|
||||
added_include = False
|
||||
with open(h_file, 'r') as fin:
|
||||
for line in fin:
|
||||
if tactic_pat.match(line):
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % os.path.basename(h_file))
|
||||
try:
|
||||
eval(line.strip('\n '), eval_globals, None)
|
||||
except Exception as e:
|
||||
_logger.error("Failed processing ADD_TACTIC command at '{}'\n{}".format(
|
||||
fullname, line))
|
||||
raise e
|
||||
if probe_pat.match(line):
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % os.path.basename(h_file))
|
||||
try:
|
||||
eval(line.strip('\n '), eval_globals, None)
|
||||
except Exception as e:
|
||||
_logger.error("Failed processing ADD_PROBE command at '{}'\n{}".format(
|
||||
fullname, line))
|
||||
raise e
|
||||
# First pass will just generate the tactic factories
|
||||
idx = 0
|
||||
for data in ADD_TACTIC_DATA:
|
||||
fout.write('MK_SIMPLE_TACTIC_FACTORY(__Z3_local_factory_%s, %s);\n' % (idx, data[2]))
|
||||
idx = idx + 1
|
||||
fout.write('#define ADD_TACTIC_CMD(NAME, DESCR, FACTORY) ctx.insert(alloc(tactic_cmd, symbol(NAME), DESCR, alloc(FACTORY)))\n')
|
||||
fout.write('#define ADD_PROBE(NAME, DESCR, PROBE) ctx.insert(alloc(probe_info, symbol(NAME), DESCR, PROBE))\n')
|
||||
fout.write('void install_tactics(tactic_manager & ctx) {\n')
|
||||
idx = 0
|
||||
for data in ADD_TACTIC_DATA:
|
||||
fout.write(' ADD_TACTIC_CMD("%s", "%s", __Z3_local_factory_%s);\n' % (data[0], data[1], idx))
|
||||
idx = idx + 1
|
||||
for data in ADD_PROBE_DATA:
|
||||
fout.write(' ADD_PROBE("%s", "%s", %s);\n' % data)
|
||||
fout.write('}\n')
|
||||
fout.close()
|
||||
return fullname
|
||||
|
||||
###############################################################################
|
||||
# Functions for generating ``mem_initializer.cpp``
|
||||
###############################################################################
|
||||
|
||||
def mk_mem_initializer_cpp_internal(component_src_dirs, path):
|
||||
"""
|
||||
Generate a ``mem_initializer.cpp`` file in the directory ``path``.
|
||||
Returns the path to the generated file.
|
||||
|
||||
This file implements the procedures
|
||||
|
||||
```
|
||||
void mem_initialize()
|
||||
void mem_finalize()
|
||||
```
|
||||
|
||||
These procedures are invoked by the Z3 memory_manager
|
||||
"""
|
||||
assert isinstance(component_src_dirs, list)
|
||||
assert check_dir_exists(path)
|
||||
initializer_cmds = []
|
||||
finalizer_cmds = []
|
||||
fullname = os.path.join(path, 'mem_initializer.cpp')
|
||||
fout = open(fullname, 'w')
|
||||
fout.write('// Automatically generated file.\n')
|
||||
initializer_pat = re.compile('[ \t]*ADD_INITIALIZER\(\'([^\']*)\'\)')
|
||||
# ADD_INITIALIZER with priority
|
||||
initializer_prio_pat = re.compile('[ \t]*ADD_INITIALIZER\(\'([^\']*)\',[ \t]*(-?[0-9]*)\)')
|
||||
finalizer_pat = re.compile('[ \t]*ADD_FINALIZER\(\'([^\']*)\'\)')
|
||||
h_files_full_path = []
|
||||
for component_src_dir in sorted(component_src_dirs):
|
||||
h_files = filter(lambda f: f.endswith('.h') or f.endswith('.hpp'), os.listdir(component_src_dir))
|
||||
h_files = list(map(lambda p: os.path.join(component_src_dir, p), h_files))
|
||||
h_files_full_path.extend(h_files)
|
||||
for h_file in sorted_headers_by_component(h_files_full_path):
|
||||
added_include = False
|
||||
with open(h_file, 'r') as fin:
|
||||
for line in fin:
|
||||
m = initializer_pat.match(line)
|
||||
if m:
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % os.path.basename(h_file))
|
||||
initializer_cmds.append((m.group(1), 0))
|
||||
m = initializer_prio_pat.match(line)
|
||||
if m:
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % os.path.basename(h_file))
|
||||
initializer_cmds.append((m.group(1), int(m.group(2))))
|
||||
m = finalizer_pat.match(line)
|
||||
if m:
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % os.path.basename(h_file))
|
||||
finalizer_cmds.append(m.group(1))
|
||||
initializer_cmds.sort(key=lambda tup: tup[1])
|
||||
fout.write('void mem_initialize() {\n')
|
||||
for (cmd, prio) in initializer_cmds:
|
||||
fout.write(cmd)
|
||||
fout.write('\n')
|
||||
fout.write('}\n')
|
||||
fout.write('void mem_finalize() {\n')
|
||||
for cmd in finalizer_cmds:
|
||||
fout.write(cmd)
|
||||
fout.write('\n')
|
||||
fout.write('}\n')
|
||||
fout.close()
|
||||
return fullname
|
||||
|
||||
###############################################################################
|
||||
# Functions for generating ``database.h``
|
||||
###############################################################################
|
||||
|
||||
def mk_pat_db_internal(inputFilePath, outputFilePath):
|
||||
"""
|
||||
Generate ``g_pattern_database[]`` declaration header file.
|
||||
"""
|
||||
with open(inputFilePath, 'r') as fin:
|
||||
with open(outputFilePath, 'w') as fout:
|
||||
fout.write('static char const g_pattern_database[] =\n')
|
||||
for line in fin:
|
||||
fout.write('"%s\\n"\n' % line.strip('\n'))
|
||||
fout.write(';\n')
|
||||
|
||||
###############################################################################
|
||||
# Functions and data structures for generating ``*_params.hpp`` files from
|
||||
# ``*.pyg`` files
|
||||
###############################################################################
|
||||
|
||||
UINT = 0
|
||||
BOOL = 1
|
||||
DOUBLE = 2
|
||||
STRING = 3
|
||||
SYMBOL = 4
|
||||
UINT_MAX = 4294967295
|
||||
|
||||
TYPE2CPK = { UINT : 'CPK_UINT', BOOL : 'CPK_BOOL', DOUBLE : 'CPK_DOUBLE', STRING : 'CPK_STRING', SYMBOL : 'CPK_SYMBOL' }
|
||||
TYPE2CTYPE = { UINT : 'unsigned', BOOL : 'bool', DOUBLE : 'double', STRING : 'char const *', SYMBOL : 'symbol' }
|
||||
TYPE2GETTER = { UINT : 'get_uint', BOOL : 'get_bool', DOUBLE : 'get_double', STRING : 'get_str', SYMBOL : 'get_sym' }
|
||||
|
||||
def pyg_default(p):
|
||||
if p[1] == BOOL:
|
||||
if p[2]:
|
||||
return "true"
|
||||
else:
|
||||
return "false"
|
||||
return p[2]
|
||||
|
||||
def pyg_default_as_c_literal(p):
|
||||
if p[1] == BOOL:
|
||||
if p[2]:
|
||||
return "true"
|
||||
else:
|
||||
return "false"
|
||||
elif p[1] == STRING:
|
||||
return '"%s"' % p[2]
|
||||
elif p[1] == SYMBOL:
|
||||
return 'symbol("%s")' % p[2]
|
||||
elif p[1] == UINT:
|
||||
return '%su' % p[2]
|
||||
else:
|
||||
return p[2]
|
||||
|
||||
def to_c_method(s):
|
||||
return s.replace('.', '_')
|
||||
|
||||
|
||||
def max_memory_param():
|
||||
return ('max_memory', UINT, UINT_MAX, 'maximum amount of memory in megabytes')
|
||||
|
||||
def max_steps_param():
|
||||
return ('max_steps', UINT, UINT_MAX, 'maximum number of steps')
|
||||
|
||||
def mk_hpp_from_pyg(pyg_file, output_dir):
|
||||
"""
|
||||
Generates the corresponding header file for the input pyg file
|
||||
at the path ``pyg_file``. The file is emitted into the directory
|
||||
``output_dir``.
|
||||
|
||||
Returns the path to the generated file
|
||||
"""
|
||||
CURRENT_PYG_HPP_DEST_DIR = output_dir
|
||||
# Note OUTPUT_HPP_FILE cannot be a string as we need a mutable variable
|
||||
# for the nested function to modify
|
||||
OUTPUT_HPP_FILE = [ ]
|
||||
# The function below has been nested so that it can use closure to capture
|
||||
# the above variables that aren't global but instead local to this
|
||||
# function. This avoids use of global state which makes this function safer.
|
||||
def def_module_params(module_name, export, params, class_name=None, description=None):
|
||||
dirname = CURRENT_PYG_HPP_DEST_DIR
|
||||
assert(os.path.exists(dirname))
|
||||
if class_name is None:
|
||||
class_name = '%s_params' % module_name
|
||||
hpp = os.path.join(dirname, '%s.hpp' % class_name)
|
||||
out = open(hpp, 'w')
|
||||
out.write('// Automatically generated file\n')
|
||||
out.write('#ifndef __%s_HPP_\n' % class_name.upper())
|
||||
out.write('#define __%s_HPP_\n' % class_name.upper())
|
||||
out.write('#include"params.h"\n')
|
||||
if export:
|
||||
out.write('#include"gparams.h"\n')
|
||||
out.write('struct %s {\n' % class_name)
|
||||
out.write(' params_ref const & p;\n')
|
||||
if export:
|
||||
out.write(' params_ref g;\n')
|
||||
out.write(' %s(params_ref const & _p = params_ref::get_empty()):\n' % class_name)
|
||||
out.write(' p(_p)')
|
||||
if export:
|
||||
out.write(', g(gparams::get_module("%s"))' % module_name)
|
||||
out.write(' {}\n')
|
||||
out.write(' static void collect_param_descrs(param_descrs & d) {\n')
|
||||
for param in params:
|
||||
out.write(' d.insert("%s", %s, "%s", "%s","%s");\n' % (param[0], TYPE2CPK[param[1]], param[3], pyg_default(param), module_name))
|
||||
out.write(' }\n')
|
||||
if export:
|
||||
out.write(' /*\n')
|
||||
out.write(" REG_MODULE_PARAMS('%s', '%s::collect_param_descrs')\n" % (module_name, class_name))
|
||||
if description is not None:
|
||||
out.write(" REG_MODULE_DESCRIPTION('%s', '%s')\n" % (module_name, description))
|
||||
out.write(' */\n')
|
||||
# Generated accessors
|
||||
for param in params:
|
||||
if export:
|
||||
out.write(' %s %s() const { return p.%s("%s", g, %s); }\n' %
|
||||
(TYPE2CTYPE[param[1]], to_c_method(param[0]), TYPE2GETTER[param[1]], param[0], pyg_default_as_c_literal(param)))
|
||||
else:
|
||||
out.write(' %s %s() const { return p.%s("%s", %s); }\n' %
|
||||
(TYPE2CTYPE[param[1]], to_c_method(param[0]), TYPE2GETTER[param[1]], param[0], pyg_default_as_c_literal(param)))
|
||||
out.write('};\n')
|
||||
out.write('#endif\n')
|
||||
out.close()
|
||||
OUTPUT_HPP_FILE.append(hpp)
|
||||
|
||||
# Globals to use when executing the ``.pyg`` file.
|
||||
pyg_globals = {
|
||||
'UINT' : UINT,
|
||||
'BOOL' : BOOL,
|
||||
'DOUBLE' : DOUBLE,
|
||||
'STRING' : STRING,
|
||||
'SYMBOL' : SYMBOL,
|
||||
'UINT_MAX' : UINT_MAX,
|
||||
'max_memory_param' : max_memory_param,
|
||||
'max_steps_param' : max_steps_param,
|
||||
# Note that once this function is enterred that function
|
||||
# executes with respect to the globals of this module and
|
||||
# not the globals defined here
|
||||
'def_module_params' : def_module_params,
|
||||
}
|
||||
with open(pyg_file, 'r') as fh:
|
||||
eval(fh.read() + "\n", pyg_globals, None)
|
||||
assert len(OUTPUT_HPP_FILE) == 1
|
||||
return OUTPUT_HPP_FILE[0]
|
|
@ -6,42 +6,34 @@ and generates a ``gparams_register_modules.cpp`` file in
|
|||
the destination directory that defines a function
|
||||
``void gparams_register_modules()``.
|
||||
"""
|
||||
import mk_util
|
||||
import mk_genfile_common
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
def check_dir(path):
|
||||
if not os.path.exists(path):
|
||||
logging.error('"{}" does not exist'.format(path))
|
||||
return 1
|
||||
|
||||
if not os.path.isdir(path):
|
||||
logging.error('"{}" is not a directory'.format(path))
|
||||
return 1
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def main(args):
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("destination_dir", help="destination directory")
|
||||
parser.add_argument("source_dirs", nargs="+",
|
||||
help="One or more source directories to search")
|
||||
pargs = parser.parse_args(args)
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("destination_dir", help="destination directory")
|
||||
parser.add_argument("source_dirs", nargs="+",
|
||||
help="One or more source directories to search")
|
||||
pargs = parser.parse_args(args)
|
||||
|
||||
if check_dir(pargs.destination_dir) != 0:
|
||||
return 1
|
||||
if not mk_genfile_common.check_dir_exists(pargs.destination_dir):
|
||||
return 1
|
||||
|
||||
for source_dir in pargs.source_dirs:
|
||||
if check_dir(source_dir) != 0:
|
||||
return 1
|
||||
for source_dir in pargs.source_dirs:
|
||||
if not mk_genfile_common.check_dir_exists(source_dir):
|
||||
return 1
|
||||
|
||||
mk_util.mk_gparams_register_modules_internal(pargs.source_dirs, pargs.destination_dir)
|
||||
return 0
|
||||
output = mk_genfile_common.mk_gparams_register_modules_internal(
|
||||
pargs.source_dirs,
|
||||
pargs.destination_dir
|
||||
)
|
||||
logging.info('Generated "{}"'.format(output))
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
||||
|
|
|
@ -6,42 +6,33 @@ and generates a ``install_tactic.cpp`` file in
|
|||
the destination directory that defines a function
|
||||
``void install_tactics(tactic_manager& ctx)``.
|
||||
"""
|
||||
import mk_util
|
||||
import mk_genfile_common
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
def check_dir(path):
|
||||
if not os.path.exists(path):
|
||||
logging.error('"{}" does not exist'.format(path))
|
||||
return 1
|
||||
|
||||
if not os.path.isdir(path):
|
||||
logging.error('"{}" is not a directory'.format(path))
|
||||
return 1
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def main(args):
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("destination_dir", help="destination directory")
|
||||
parser.add_argument("source_dirs", nargs="+",
|
||||
help="One or more source directories to search")
|
||||
pargs = parser.parse_args(args)
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("destination_dir", help="destination directory")
|
||||
parser.add_argument("source_dirs", nargs="+",
|
||||
help="One or more source directories to search")
|
||||
pargs = parser.parse_args(args)
|
||||
|
||||
if check_dir(pargs.destination_dir) != 0:
|
||||
return 1
|
||||
if not mk_genfile_common.check_dir_exists(pargs.destination_dir):
|
||||
return 1
|
||||
|
||||
for source_dir in pargs.source_dirs:
|
||||
if check_dir(source_dir) != 0:
|
||||
return 1
|
||||
for source_dir in pargs.source_dirs:
|
||||
if not mk_genfile_common.check_dir_exists(source_dir):
|
||||
return 1
|
||||
|
||||
mk_util.mk_install_tactic_cpp_internal(pargs.source_dirs, pargs.destination_dir)
|
||||
return 0
|
||||
output = mk_genfile_common.mk_install_tactic_cpp_internal(
|
||||
pargs.source_dirs,
|
||||
pargs.destination_dir
|
||||
)
|
||||
logging.info('Generated "{}"'.format(output))
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
|
|
@ -7,42 +7,34 @@ emits and implementation of
|
|||
``void mem_finalize()`` into ``mem_initializer.cpp``
|
||||
in the destination directory.
|
||||
"""
|
||||
import mk_util
|
||||
import mk_genfile_common
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
def check_dir(path):
|
||||
if not os.path.exists(path):
|
||||
logging.error('"{}" does not exist'.format(path))
|
||||
return 1
|
||||
|
||||
if not os.path.isdir(path):
|
||||
logging.error('"{}" is not a directory'.format(path))
|
||||
return 1
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def main(args):
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("destination_dir", help="destination directory")
|
||||
parser.add_argument("source_dirs", nargs="+",
|
||||
help="One or more source directories to search")
|
||||
pargs = parser.parse_args(args)
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("destination_dir", help="destination directory")
|
||||
parser.add_argument("source_dirs", nargs="+",
|
||||
help="One or more source directories to search")
|
||||
pargs = parser.parse_args(args)
|
||||
|
||||
if check_dir(pargs.destination_dir) != 0:
|
||||
return 1
|
||||
if not mk_genfile_common.check_dir_exists(pargs.destination_dir):
|
||||
return 1
|
||||
|
||||
for source_dir in pargs.source_dirs:
|
||||
if check_dir(source_dir) != 0:
|
||||
return 1
|
||||
for source_dir in pargs.source_dirs:
|
||||
if not mk_genfile_common.check_dir_exists(source_dir):
|
||||
return 1
|
||||
|
||||
mk_util.mk_mem_initializer_cpp_internal(pargs.source_dirs, pargs.destination_dir)
|
||||
return 0
|
||||
output = mk_genfile_common.mk_mem_initializer_cpp_internal(
|
||||
pargs.source_dirs,
|
||||
pargs.destination_dir
|
||||
)
|
||||
logging.info('Generated "{}"'.format(output))
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
||||
|
|
|
@ -3,32 +3,27 @@
|
|||
Reads a pattern database and generates the corresponding
|
||||
header file.
|
||||
"""
|
||||
import mk_util
|
||||
import mk_genfile_common
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
def main(args):
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("db_file", help="pattern database file")
|
||||
parser.add_argument("output_file", help="output header file path")
|
||||
pargs = parser.parse_args(args)
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument("db_file", help="pattern database file")
|
||||
parser.add_argument("output_file", help="output header file path")
|
||||
pargs = parser.parse_args(args)
|
||||
|
||||
if not os.path.exists(pargs.db_file):
|
||||
logging.error('"{}" does not exist'.format(pargs.db_file))
|
||||
return 1
|
||||
if not os.path.exists(pargs.db_file):
|
||||
logging.error('"{}" does not exist'.format(pargs.db_file))
|
||||
return 1
|
||||
|
||||
if (os.path.exists(pargs.output_file) and
|
||||
not os.path.isfile(pargs.output_file)):
|
||||
logging.error('Refusing to overwrite "{}"'.format(
|
||||
pargs.output_file))
|
||||
return 1
|
||||
|
||||
mk_util.mk_pat_db_internal(pargs.db_file, pargs.output_file)
|
||||
return 0
|
||||
mk_genfile_common.mk_pat_db_internal(pargs.db_file, pargs.output_file)
|
||||
logging.info('Generated "{}"'.format(pargs.output_file))
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
||||
|
|
|
@ -57,7 +57,7 @@ def init_project_def():
|
|||
add_lib('fuzzing', ['ast'], 'test/fuzzing')
|
||||
add_lib('smt_tactic', ['smt'], 'smt/tactic')
|
||||
add_lib('sls_tactic', ['tactic', 'normal_forms', 'core_tactics', 'bv_tactics'], 'tactic/sls')
|
||||
add_lib('qe', ['smt','sat'], 'qe')
|
||||
add_lib('qe', ['smt','sat','nlsat','tactic','nlsat_tactic'], 'qe')
|
||||
add_lib('duality', ['smt', 'interp', 'qe'])
|
||||
add_lib('muz', ['smt', 'sat', 'smt2parser', 'aig_tactic', 'qe'], 'muz/base')
|
||||
add_lib('dataflow', ['muz'], 'muz/dataflow')
|
||||
|
|
|
@ -12,6 +12,7 @@ import re
|
|||
import getopt
|
||||
import shutil
|
||||
from mk_exception import *
|
||||
import mk_genfile_common
|
||||
from fnmatch import fnmatch
|
||||
import distutils.sysconfig
|
||||
import compileall
|
||||
|
@ -27,6 +28,7 @@ CXX=getenv("CXX", None)
|
|||
CC=getenv("CC", None)
|
||||
CPPFLAGS=getenv("CPPFLAGS", "")
|
||||
CXXFLAGS=getenv("CXXFLAGS", "")
|
||||
AR=getenv("AR", "ar")
|
||||
EXAMP_DEBUG_FLAG=''
|
||||
LDFLAGS=getenv("LDFLAGS", "")
|
||||
JNI_HOME=getenv("JNI_HOME", None)
|
||||
|
@ -499,8 +501,8 @@ def is64():
|
|||
def check_ar():
|
||||
if is_verbose():
|
||||
print("Testing ar...")
|
||||
if which('ar') is None:
|
||||
raise MKException('ar (archive tool) was not found')
|
||||
if which(AR) is None:
|
||||
raise MKException('%s (archive tool) was not found' % AR)
|
||||
|
||||
def find_cxx_compiler():
|
||||
global CXX, CXX_COMPILERS
|
||||
|
@ -1934,6 +1936,7 @@ class MLComponent(Component):
|
|||
else:
|
||||
out.write(' ' + os.path.join(self.sub_dir, m) + '.mli')
|
||||
out.write(' ' + os.path.join(self.sub_dir, m) + '.cmi')
|
||||
out.write(' ' + os.path.join(self.sub_dir, m) + '.cmx')
|
||||
out.write(' %s' % ((os.path.join(self.sub_dir, 'libz3ml$(LIB_EXT)'))))
|
||||
out.write(' %s' % ((os.path.join(self.sub_dir, 'z3ml$(LIB_EXT)'))))
|
||||
out.write(' %s' % ((os.path.join(self.sub_dir, 'z3ml.cma'))))
|
||||
|
@ -2034,7 +2037,7 @@ class DotNetExampleComponent(ExampleComponent):
|
|||
if VS_X64:
|
||||
out.write(' /platform:x64')
|
||||
elif VS_ARM:
|
||||
out.write(' /platform:arm')
|
||||
out.write(' /platform:arm')
|
||||
else:
|
||||
out.write(' /platform:x86')
|
||||
for csfile in get_cs_files(self.ex_dir):
|
||||
|
@ -2227,7 +2230,7 @@ def mk_config():
|
|||
static_opt = static_opt + 'd'
|
||||
config.write(
|
||||
'AR_FLAGS=/nologo\n'
|
||||
'LINK_FLAGS=/nologo %s\n'
|
||||
'LINK_FLAGS=/nologo %s\n'
|
||||
'SLINK_FLAGS=/nologo /LDd\n' % static_opt)
|
||||
if VS_X64:
|
||||
config.write(
|
||||
|
@ -2270,7 +2273,7 @@ def mk_config():
|
|||
'LINK_EXTRA_FLAGS=/link%s /DEBUG /MACHINE:X86 /SUBSYSTEM:CONSOLE /INCREMENTAL:NO /STACK:8388608 /OPT:REF /OPT:ICF /TLBID:1 /DYNAMICBASE /NXCOMPAT\n'
|
||||
'SLINK_EXTRA_FLAGS=/link%s /DEBUG /MACHINE:X86 /SUBSYSTEM:WINDOWS /INCREMENTAL:NO /STACK:8388608 /OPT:REF /OPT:ICF /TLBID:1 /DYNAMICBASE:NO\n' % (LTCG, LTCG))
|
||||
|
||||
|
||||
|
||||
|
||||
# End of Windows VS config.mk
|
||||
if is_verbose():
|
||||
|
@ -2387,7 +2390,7 @@ def mk_config():
|
|||
config.write('CXX_OUT_FLAG=-o \n')
|
||||
config.write('OBJ_EXT=.o\n')
|
||||
config.write('LIB_EXT=.a\n')
|
||||
config.write('AR=ar\n')
|
||||
config.write('AR=%s\n' % AR)
|
||||
config.write('AR_FLAGS=rcs\n')
|
||||
config.write('AR_OUTFLAG=\n')
|
||||
config.write('EXE_EXT=\n')
|
||||
|
@ -2408,6 +2411,7 @@ def mk_config():
|
|||
print('Host platform: %s' % sysname)
|
||||
print('C++ Compiler: %s' % CXX)
|
||||
print('C Compiler : %s' % CC)
|
||||
print('Archive Tool: %s' % AR)
|
||||
print('Arithmetic: %s' % ARITH)
|
||||
print('OpenMP: %s' % HAS_OMP)
|
||||
print('Prefix: %s' % PREFIX)
|
||||
|
@ -2503,7 +2507,7 @@ def mk_makefile():
|
|||
print("To build Z3, open a [Visual Studio x64 Command Prompt], then")
|
||||
elif VS_ARM:
|
||||
print(" platform: ARM\n")
|
||||
print("To build Z3, open a [Visual Studio ARM Command Prompt], then")
|
||||
print("To build Z3, open a [Visual Studio ARM Command Prompt], then")
|
||||
else:
|
||||
print(" platform: x86")
|
||||
print("To build Z3, open a [Visual Studio Command Prompt], then")
|
||||
|
@ -2521,104 +2525,6 @@ def mk_auto_src():
|
|||
mk_all_mem_initializer_cpps()
|
||||
mk_all_gparams_register_modules()
|
||||
|
||||
UINT = 0
|
||||
BOOL = 1
|
||||
DOUBLE = 2
|
||||
STRING = 3
|
||||
SYMBOL = 4
|
||||
UINT_MAX = 4294967295
|
||||
CURRENT_PYG_HPP_DEST_DIR = None
|
||||
|
||||
def get_current_pyg_hpp_dest_dir():
|
||||
return CURRENT_PYG_HPP_DEST_DIR
|
||||
|
||||
TYPE2CPK = { UINT : 'CPK_UINT', BOOL : 'CPK_BOOL', DOUBLE : 'CPK_DOUBLE', STRING : 'CPK_STRING', SYMBOL : 'CPK_SYMBOL' }
|
||||
TYPE2CTYPE = { UINT : 'unsigned', BOOL : 'bool', DOUBLE : 'double', STRING : 'char const *', SYMBOL : 'symbol' }
|
||||
TYPE2GETTER = { UINT : 'get_uint', BOOL : 'get_bool', DOUBLE : 'get_double', STRING : 'get_str', SYMBOL : 'get_sym' }
|
||||
|
||||
def pyg_default(p):
|
||||
if p[1] == BOOL:
|
||||
if p[2]:
|
||||
return "true"
|
||||
else:
|
||||
return "false"
|
||||
return p[2]
|
||||
|
||||
def pyg_default_as_c_literal(p):
|
||||
if p[1] == BOOL:
|
||||
if p[2]:
|
||||
return "true"
|
||||
else:
|
||||
return "false"
|
||||
elif p[1] == STRING:
|
||||
return '"%s"' % p[2]
|
||||
elif p[1] == SYMBOL:
|
||||
return 'symbol("%s")' % p[2]
|
||||
elif p[1] == UINT:
|
||||
return '%su' % p[2]
|
||||
else:
|
||||
return p[2]
|
||||
|
||||
def to_c_method(s):
|
||||
return s.replace('.', '_')
|
||||
|
||||
def def_module_params(module_name, export, params, class_name=None, description=None):
|
||||
dirname = get_current_pyg_hpp_dest_dir()
|
||||
assert(os.path.exists(dirname))
|
||||
if class_name is None:
|
||||
class_name = '%s_params' % module_name
|
||||
hpp = os.path.join(dirname, '%s.hpp' % class_name)
|
||||
out = open(hpp, 'w')
|
||||
out.write('// Automatically generated file\n')
|
||||
out.write('#ifndef __%s_HPP_\n' % class_name.upper())
|
||||
out.write('#define __%s_HPP_\n' % class_name.upper())
|
||||
out.write('#include"params.h"\n')
|
||||
if export:
|
||||
out.write('#include"gparams.h"\n')
|
||||
out.write('struct %s {\n' % class_name)
|
||||
out.write(' params_ref const & p;\n')
|
||||
if export:
|
||||
out.write(' params_ref g;\n')
|
||||
out.write(' %s(params_ref const & _p = params_ref::get_empty()):\n' % class_name)
|
||||
out.write(' p(_p)')
|
||||
if export:
|
||||
out.write(', g(gparams::get_module("%s"))' % module_name)
|
||||
out.write(' {}\n')
|
||||
out.write(' static void collect_param_descrs(param_descrs & d) {\n')
|
||||
for param in params:
|
||||
out.write(' d.insert("%s", %s, "%s", "%s","%s");\n' % (param[0], TYPE2CPK[param[1]], param[3], pyg_default(param), module_name))
|
||||
out.write(' }\n')
|
||||
if export:
|
||||
out.write(' /*\n')
|
||||
out.write(" REG_MODULE_PARAMS('%s', '%s::collect_param_descrs')\n" % (module_name, class_name))
|
||||
if description is not None:
|
||||
out.write(" REG_MODULE_DESCRIPTION('%s', '%s')\n" % (module_name, description))
|
||||
out.write(' */\n')
|
||||
# Generated accessors
|
||||
for param in params:
|
||||
if export:
|
||||
out.write(' %s %s() const { return p.%s("%s", g, %s); }\n' %
|
||||
(TYPE2CTYPE[param[1]], to_c_method(param[0]), TYPE2GETTER[param[1]], param[0], pyg_default_as_c_literal(param)))
|
||||
else:
|
||||
out.write(' %s %s() const { return p.%s("%s", %s); }\n' %
|
||||
(TYPE2CTYPE[param[1]], to_c_method(param[0]), TYPE2GETTER[param[1]], param[0], pyg_default_as_c_literal(param)))
|
||||
out.write('};\n')
|
||||
out.write('#endif\n')
|
||||
out.close()
|
||||
if is_verbose():
|
||||
print("Generated '%s'" % hpp)
|
||||
|
||||
def max_memory_param():
|
||||
return ('max_memory', UINT, UINT_MAX, 'maximum amount of memory in megabytes')
|
||||
|
||||
def max_steps_param():
|
||||
return ('max_steps', UINT, UINT_MAX, 'maximum number of steps')
|
||||
|
||||
PYG_GLOBALS = { 'UINT' : UINT, 'BOOL' : BOOL, 'DOUBLE' : DOUBLE, 'STRING' : STRING, 'SYMBOL' : SYMBOL,
|
||||
'UINT_MAX' : UINT_MAX,
|
||||
'max_memory_param' : max_memory_param,
|
||||
'max_steps_param' : max_steps_param,
|
||||
'def_module_params' : def_module_params }
|
||||
|
||||
def _execfile(file, globals=globals(), locals=locals()):
|
||||
if sys.version < "2.7":
|
||||
|
@ -2629,13 +2535,13 @@ def _execfile(file, globals=globals(), locals=locals()):
|
|||
|
||||
# Execute python auxiliary scripts that generate extra code for Z3.
|
||||
def exec_pyg_scripts():
|
||||
global CURRENT_PYG_HPP_DEST_DIR
|
||||
for root, dirs, files in os.walk('src'):
|
||||
for f in files:
|
||||
if f.endswith('.pyg'):
|
||||
script = os.path.join(root, f)
|
||||
CURRENT_PYG_HPP_DEST_DIR = root
|
||||
_execfile(script, PYG_GLOBALS)
|
||||
generated_file = mk_genfile_common.mk_hpp_from_pyg(script, root)
|
||||
if is_verbose():
|
||||
print("Generated '{}'".format(generated_file))
|
||||
|
||||
# TODO: delete after src/ast/pattern/expr_pattern_match
|
||||
# database.smt ==> database.h
|
||||
|
@ -2643,17 +2549,9 @@ def mk_pat_db():
|
|||
c = get_component(PATTERN_COMPONENT)
|
||||
fin = os.path.join(c.src_dir, 'database.smt2')
|
||||
fout = os.path.join(c.src_dir, 'database.h')
|
||||
mk_pat_db_internal(fin, fout)
|
||||
|
||||
def mk_pat_db_internal(inputFilePath, outputFilePath):
|
||||
with open(inputFilePath, 'r') as fin:
|
||||
with open(outputFilePath, 'w') as fout:
|
||||
fout.write('static char const g_pattern_database[] =\n')
|
||||
for line in fin:
|
||||
fout.write('"%s\\n"\n' % line.strip('\n'))
|
||||
fout.write(';\n')
|
||||
mk_genfile_common.mk_pat_db_internal(fin, fout)
|
||||
if VERBOSE:
|
||||
print("Generated '%s'" % outputFilePath)
|
||||
print("Generated '{}'".format(fout))
|
||||
|
||||
# Update version numbers
|
||||
def update_version():
|
||||
|
@ -2704,82 +2602,14 @@ def mk_all_assembly_infos(major, minor, build, revision):
|
|||
else:
|
||||
raise MKException("Failed to find assembly template info file '%s'" % assembly_info_template)
|
||||
|
||||
ADD_TACTIC_DATA=[]
|
||||
ADD_PROBE_DATA=[]
|
||||
|
||||
def ADD_TACTIC(name, descr, cmd):
|
||||
global ADD_TACTIC_DATA
|
||||
ADD_TACTIC_DATA.append((name, descr, cmd))
|
||||
|
||||
def ADD_PROBE(name, descr, cmd):
|
||||
global ADD_PROBE_DATA
|
||||
ADD_PROBE_DATA.append((name, descr, cmd))
|
||||
|
||||
# Generate an install_tactics.cpp at path.
|
||||
# This file implements the procedure
|
||||
# void install_tactics(tactic_manager & ctx)
|
||||
# It installs all tactics in the given component (name) list cnames
|
||||
# The procedure looks for ADD_TACTIC commands in the .h files of these components.
|
||||
def mk_install_tactic_cpp(cnames, path):
|
||||
component_src_dirs = []
|
||||
for cname in cnames:
|
||||
c = get_component(cname)
|
||||
component_src_dirs.append(c.src_dir)
|
||||
mk_install_tactic_cpp_internal(component_src_dirs, path)
|
||||
|
||||
def mk_install_tactic_cpp_internal(component_src_dirs, path):
|
||||
global ADD_TACTIC_DATA, ADD_PROBE_DATA
|
||||
ADD_TACTIC_DATA = []
|
||||
ADD_PROBE_DATA = []
|
||||
fullname = os.path.join(path, 'install_tactic.cpp')
|
||||
fout = open(fullname, 'w')
|
||||
fout.write('// Automatically generated file.\n')
|
||||
fout.write('#include"tactic.h"\n')
|
||||
fout.write('#include"tactic_cmds.h"\n')
|
||||
fout.write('#include"cmd_context.h"\n')
|
||||
tactic_pat = re.compile('[ \t]*ADD_TACTIC\(.*\)')
|
||||
probe_pat = re.compile('[ \t]*ADD_PROBE\(.*\)')
|
||||
for component_src_dir in component_src_dirs:
|
||||
h_files = filter(lambda f: f.endswith('.h') or f.endswith('.hpp'), os.listdir(component_src_dir))
|
||||
for h_file in h_files:
|
||||
added_include = False
|
||||
fin = open(os.path.join(component_src_dir, h_file), 'r')
|
||||
for line in fin:
|
||||
if tactic_pat.match(line):
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % h_file)
|
||||
try:
|
||||
exec(line.strip('\n '), globals())
|
||||
except:
|
||||
raise MKException("Failed processing ADD_TACTIC command at '%s'\n%s" % (fullname, line))
|
||||
if probe_pat.match(line):
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % h_file)
|
||||
try:
|
||||
exec(line.strip('\n '), globals())
|
||||
except:
|
||||
raise MKException("Failed processing ADD_PROBE command at '%s'\n%s" % (fullname, line))
|
||||
fin.close()
|
||||
# First pass will just generate the tactic factories
|
||||
idx = 0
|
||||
for data in ADD_TACTIC_DATA:
|
||||
fout.write('MK_SIMPLE_TACTIC_FACTORY(__Z3_local_factory_%s, %s);\n' % (idx, data[2]))
|
||||
idx = idx + 1
|
||||
fout.write('#define ADD_TACTIC_CMD(NAME, DESCR, FACTORY) ctx.insert(alloc(tactic_cmd, symbol(NAME), DESCR, alloc(FACTORY)))\n')
|
||||
fout.write('#define ADD_PROBE(NAME, DESCR, PROBE) ctx.insert(alloc(probe_info, symbol(NAME), DESCR, PROBE))\n')
|
||||
fout.write('void install_tactics(tactic_manager & ctx) {\n')
|
||||
idx = 0
|
||||
for data in ADD_TACTIC_DATA:
|
||||
fout.write(' ADD_TACTIC_CMD("%s", "%s", __Z3_local_factory_%s);\n' % (data[0], data[1], idx))
|
||||
idx = idx + 1
|
||||
for data in ADD_PROBE_DATA:
|
||||
fout.write(' ADD_PROBE("%s", "%s", %s);\n' % data)
|
||||
fout.write('}\n')
|
||||
fout.close()
|
||||
component_src_dirs = []
|
||||
for cname in cnames:
|
||||
c = get_component(cname)
|
||||
component_src_dirs.append(c.src_dir)
|
||||
generated_file = mk_genfile_common.mk_install_tactic_cpp_internal(component_src_dirs, path)
|
||||
if VERBOSE:
|
||||
print("Generated '%s'" % fullname)
|
||||
print("Generated '{}'".format(generated_file))
|
||||
|
||||
def mk_all_install_tactic_cpps():
|
||||
if not ONLY_MAKEFILES:
|
||||
|
@ -2790,67 +2620,14 @@ def mk_all_install_tactic_cpps():
|
|||
cnames.append(c.name)
|
||||
mk_install_tactic_cpp(cnames, c.src_dir)
|
||||
|
||||
# Generate an mem_initializer.cpp at path.
|
||||
# This file implements the procedures
|
||||
# void mem_initialize()
|
||||
# void mem_finalize()
|
||||
# These procedures are invoked by the Z3 memory_manager
|
||||
def mk_mem_initializer_cpp(cnames, path):
|
||||
component_src_dirs = []
|
||||
for cname in cnames:
|
||||
c = get_component(cname)
|
||||
component_src_dirs.append(c.src_dir)
|
||||
mk_mem_initializer_cpp_internal(component_src_dirs, path)
|
||||
|
||||
def mk_mem_initializer_cpp_internal(component_src_dirs, path):
|
||||
initializer_cmds = []
|
||||
finalizer_cmds = []
|
||||
fullname = os.path.join(path, 'mem_initializer.cpp')
|
||||
fout = open(fullname, 'w')
|
||||
fout.write('// Automatically generated file.\n')
|
||||
initializer_pat = re.compile('[ \t]*ADD_INITIALIZER\(\'([^\']*)\'\)')
|
||||
# ADD_INITIALIZER with priority
|
||||
initializer_prio_pat = re.compile('[ \t]*ADD_INITIALIZER\(\'([^\']*)\',[ \t]*(-?[0-9]*)\)')
|
||||
finalizer_pat = re.compile('[ \t]*ADD_FINALIZER\(\'([^\']*)\'\)')
|
||||
for component_src_dir in component_src_dirs:
|
||||
h_files = filter(lambda f: f.endswith('.h') or f.endswith('.hpp'), os.listdir(component_src_dir))
|
||||
for h_file in h_files:
|
||||
added_include = False
|
||||
fin = open(os.path.join(component_src_dir, h_file), 'r')
|
||||
for line in fin:
|
||||
m = initializer_pat.match(line)
|
||||
if m:
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % h_file)
|
||||
initializer_cmds.append((m.group(1), 0))
|
||||
m = initializer_prio_pat.match(line)
|
||||
if m:
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % h_file)
|
||||
initializer_cmds.append((m.group(1), int(m.group(2))))
|
||||
m = finalizer_pat.match(line)
|
||||
if m:
|
||||
if not added_include:
|
||||
added_include = True
|
||||
fout.write('#include"%s"\n' % h_file)
|
||||
finalizer_cmds.append(m.group(1))
|
||||
fin.close()
|
||||
initializer_cmds.sort(key=lambda tup: tup[1])
|
||||
fout.write('void mem_initialize() {\n')
|
||||
for (cmd, prio) in initializer_cmds:
|
||||
fout.write(cmd)
|
||||
fout.write('\n')
|
||||
fout.write('}\n')
|
||||
fout.write('void mem_finalize() {\n')
|
||||
for cmd in finalizer_cmds:
|
||||
fout.write(cmd)
|
||||
fout.write('\n')
|
||||
fout.write('}\n')
|
||||
fout.close()
|
||||
component_src_dirs = []
|
||||
for cname in cnames:
|
||||
c = get_component(cname)
|
||||
component_src_dirs.append(c.src_dir)
|
||||
generated_file = mk_genfile_common.mk_mem_initializer_cpp_internal(component_src_dirs, path)
|
||||
if VERBOSE:
|
||||
print("Generated '%s'" % fullname)
|
||||
print("Generated '{}'".format(generated_file))
|
||||
|
||||
def mk_all_mem_initializer_cpps():
|
||||
if not ONLY_MAKEFILES:
|
||||
|
@ -2861,61 +2638,14 @@ def mk_all_mem_initializer_cpps():
|
|||
                cnames.append(c.name)
                mk_mem_initializer_cpp(cnames, c.src_dir)

# Generate a ``gparams_register_modules.cpp`` at path.
# This file implements the procedure
# void gparams_register_modules()
# This procedure is invoked by gparams::init()
def mk_gparams_register_modules(cnames, path):
    component_src_dirs = []
    for cname in cnames:
        c = get_component(cname)
        component_src_dirs.append(c.src_dir)
    mk_gparams_register_modules_internal(component_src_dirs, path)

def mk_gparams_register_modules_internal(component_src_dirs, path):
    cmds = []
    mod_cmds = []
    mod_descrs = []
    fullname = os.path.join(path, 'gparams_register_modules.cpp')
    fout = open(fullname, 'w')
    fout.write('// Automatically generated file.\n')
    fout.write('#include"gparams.h"\n')
    reg_pat = re.compile('[ \t]*REG_PARAMS\(\'([^\']*)\'\)')
    reg_mod_pat = re.compile('[ \t]*REG_MODULE_PARAMS\(\'([^\']*)\', *\'([^\']*)\'\)')
    reg_mod_descr_pat = re.compile('[ \t]*REG_MODULE_DESCRIPTION\(\'([^\']*)\', *\'([^\']*)\'\)')
    for component_src_dir in component_src_dirs:
        h_files = filter(lambda f: f.endswith('.h') or f.endswith('.hpp'), os.listdir(component_src_dir))
        for h_file in h_files:
            added_include = False
            fin = open(os.path.join(component_src_dir, h_file), 'r')
            for line in fin:
                m = reg_pat.match(line)
                if m:
                    if not added_include:
                        added_include = True
                        fout.write('#include"%s"\n' % h_file)
                    cmds.append((m.group(1)))
                m = reg_mod_pat.match(line)
                if m:
                    if not added_include:
                        added_include = True
                        fout.write('#include"%s"\n' % h_file)
                    mod_cmds.append((m.group(1), m.group(2)))
                m = reg_mod_descr_pat.match(line)
                if m:
                    mod_descrs.append((m.group(1), m.group(2)))
            fin.close()
    fout.write('void gparams_register_modules() {\n')
    for code in cmds:
        fout.write('{ param_descrs d; %s(d); gparams::register_global(d); }\n' % code)
    for (mod, code) in mod_cmds:
        fout.write('{ param_descrs * d = alloc(param_descrs); %s(*d); gparams::register_module("%s", d); }\n' % (code, mod))
    for (mod, descr) in mod_descrs:
        fout.write('gparams::register_module_descr("%s", "%s");\n' % (mod, descr))
    fout.write('}\n')
    fout.close()
    component_src_dirs = []
    for cname in cnames:
        c = get_component(cname)
        component_src_dirs.append(c.src_dir)
    generated_file = mk_genfile_common.mk_gparams_register_modules_internal(component_src_dirs, path)
    if VERBOSE:
        print("Generated '%s'" % fullname)
        print("Generated '{}'".format(generated_file))

def mk_all_gparams_register_modules():
    if not ONLY_MAKEFILES:
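
A quick illustration of what the scanner above (now hosted in mk_genfile_common) extracts from component headers; the REG_* sample lines and parameter-function names below are invented for the example:

# Minimal sketch, not part of the commit; the sample declarations are invented.
import re

reg_pat = re.compile(r'[ \t]*REG_PARAMS\(\'([^\']*)\'\)')
reg_mod_pat = re.compile(r'[ \t]*REG_MODULE_PARAMS\(\'([^\']*)\', *\'([^\']*)\'\)')

print(reg_pat.match("REG_PARAMS('my_params')").group(1))                     # my_params
print(reg_mod_pat.match("REG_MODULE_PARAMS('mymod', 'my_module_params')").groups())
# ('mymod', 'my_module_params')
# Each match becomes one registration line of the generated gparams_register_modules.cpp,
# e.g. "{ param_descrs d; my_params(d); gparams::register_global(d); }".
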
@@ -2935,28 +2665,7 @@ def mk_def_file(c):
        dot_h_c = c.find_file(dot_h, c.name)
        api = os.path.join(dot_h_c.src_dir, dot_h)
        export_header_files.append(api)
    mk_def_file_internal(defname, dll_name, export_header_files)

def mk_def_file_internal(defname, dll_name, export_header_files):
    pat1 = re.compile(".*Z3_API.*")
    fout = open(defname, 'w')
    fout.write('LIBRARY "%s"\nEXPORTS\n' % dll_name)
    num = 1
    for export_header_file in export_header_files:
        api = open(export_header_file, 'r')
        for line in api:
            m = pat1.match(line)
            if m:
                words = re.split('\W+', line)
                i = 0
                for w in words:
                    if w == 'Z3_API':
                        f = words[i+1]
                        fout.write('\t%s @%s\n' % (f, num))
                    i = i + 1
                num = num + 1
        api.close()
    fout.close()
    mk_genfile_common.mk_def_file_internal(defname, dll_name, export_header_files)
    if VERBOSE:
        print("Generated '%s'" % defname)

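
To see what the .def writer (old code above, now mk_genfile_common.mk_def_file_internal) does with a single export, here is a small sketch of the word-splitting step; the declaration is a made-up example, not copied from z3_api.h:

# Minimal sketch, not part of the commit; the declaration below is invented.
import re

line = "void Z3_API Z3_example_api_call(unsigned n);"
words = re.split(r'\W+', line)
name = words[words.index('Z3_API') + 1]    # token right after 'Z3_API'
print('\t%s @%s' % (name, 1))              # one EXPORTS entry: "Z3_example_api_call @1"
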
@@ -3049,174 +2758,21 @@ def mk_z3consts_py(api_files):
        api_file_c = api_dll.find_file(api_file, api_dll.name)
        api_file = os.path.join(api_file_c.src_dir, api_file)
        full_path_api_files.append(api_file)
    mk_z3consts_py_internal(full_path_api_files, Z3PY_SRC_DIR)

def mk_z3consts_py_internal(api_files, output_dir):
    assert os.path.isdir(output_dir)
    assert isinstance(api_files, list)

    blank_pat = re.compile("^ *\r?$")
    comment_pat = re.compile("^ *//.*$")
    typedef_pat = re.compile("typedef enum *")
    typedef2_pat = re.compile("typedef enum { *")
    openbrace_pat = re.compile("{ *")
    closebrace_pat = re.compile("}.*;")

    z3consts = open(os.path.join(output_dir, 'z3consts.py'), 'w')
    z3consts.write('# Automatically generated file\n\n')
    for api_file in api_files:
        api = open(api_file, 'r')

        SEARCHING = 0
        FOUND_ENUM = 1
        IN_ENUM = 2

        mode = SEARCHING
        decls = {}
        idx = 0

        linenum = 1
        for line in api:
            m1 = blank_pat.match(line)
            m2 = comment_pat.match(line)
            if m1 or m2:
                # skip blank lines and comments
                linenum = linenum + 1
            elif mode == SEARCHING:
                m = typedef_pat.match(line)
                if m:
                    mode = FOUND_ENUM
                m = typedef2_pat.match(line)
                if m:
                    mode = IN_ENUM
                    decls = {}
                    idx = 0
            elif mode == FOUND_ENUM:
                m = openbrace_pat.match(line)
                if m:
                    mode = IN_ENUM
                    decls = {}
                    idx = 0
                else:
                    assert False, "Invalid %s, line: %s" % (api_file, linenum)
            else:
                assert mode == IN_ENUM
                words = re.split('[^\-a-zA-Z0-9_]+', line)
                m = closebrace_pat.match(line)
                if m:
                    name = words[1]
                    z3consts.write('# enum %s\n' % name)
                    for k in decls:
                        i = decls[k]
                        z3consts.write('%s = %s\n' % (k, i))
                    z3consts.write('\n')
                    mode = SEARCHING
                elif len(words) <= 2:
                    assert False, "Invalid %s, line: %s" % (api_file, linenum)
                else:
                    if words[2] != '':
                        if len(words[2]) > 1 and words[2][1] == 'x':
                            idx = int(words[2], 16)
                        else:
                            idx = int(words[2])
                    decls[words[1]] = idx
                    idx = idx + 1
                linenum = linenum + 1
        api.close()
    z3consts.close()
    generated_file = mk_genfile_common.mk_z3consts_py_internal(full_path_api_files, Z3PY_SRC_DIR)
    if VERBOSE:
        print("Generated '%s'" % os.path.join(output_dir, 'z3consts.py'))

        print("Generated '{}'".format(generated_file))

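
The value-handling branch above (explicit hex or decimal initializer, then auto-increment) is easy to misread in diff form; a condensed, runnable sketch of that logic, with invented enum members:

# Minimal sketch, not part of the commit; enum members are invented.
def assign_values(members):
    decls, idx = {}, 0
    for name, init in members:          # init is the raw token after '=' or ''
        if init != '':
            idx = int(init, 16) if len(init) > 1 and init[1] == 'x' else int(init)
        decls[name] = idx
        idx += 1
    return decls

print(assign_values([('EX_ZERO', ''), ('EX_TEN', '10'), ('EX_HEX', '0x20'), ('EX_NEXT', '')]))
# {'EX_ZERO': 0, 'EX_TEN': 10, 'EX_HEX': 32, 'EX_NEXT': 33}
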
# Extract enumeration types from z3_api.h, and add .Net definitions
def mk_z3consts_dotnet(api_files):
    blank_pat = re.compile("^ *\r?$")
    comment_pat = re.compile("^ *//.*$")
    typedef_pat = re.compile("typedef enum *")
    typedef2_pat = re.compile("typedef enum { *")
    openbrace_pat = re.compile("{ *")
    closebrace_pat = re.compile("}.*;")

    dotnet = get_component(DOTNET_COMPONENT)

    DeprecatedEnums = [ 'Z3_search_failure' ]
    z3consts = open(os.path.join(dotnet.src_dir, 'Enumerations.cs'), 'w')
    z3consts.write('// Automatically generated file\n\n')
    z3consts.write('using System;\n\n'
                   '#pragma warning disable 1591\n\n'
                   'namespace Microsoft.Z3\n'
                   '{\n')

    full_path_api_files = []
    for api_file in api_files:
        api_file_c = dotnet.find_file(api_file, dotnet.name)
        api_file = os.path.join(api_file_c.src_dir, api_file)

        api = open(api_file, 'r')

        SEARCHING = 0
        FOUND_ENUM = 1
        IN_ENUM = 2

        mode = SEARCHING
        decls = {}
        idx = 0

        linenum = 1
        for line in api:
            m1 = blank_pat.match(line)
            m2 = comment_pat.match(line)
            if m1 or m2:
                # skip blank lines and comments
                linenum = linenum + 1
            elif mode == SEARCHING:
                m = typedef_pat.match(line)
                if m:
                    mode = FOUND_ENUM
                m = typedef2_pat.match(line)
                if m:
                    mode = IN_ENUM
                    decls = {}
                    idx = 0
            elif mode == FOUND_ENUM:
                m = openbrace_pat.match(line)
                if m:
                    mode = IN_ENUM
                    decls = {}
                    idx = 0
                else:
                    assert False, "Invalid %s, line: %s" % (api_file, linenum)
            else:
                assert mode == IN_ENUM
                words = re.split('[^\-a-zA-Z0-9_]+', line)
                m = closebrace_pat.match(line)
                if m:
                    name = words[1]
                    if name not in DeprecatedEnums:
                        z3consts.write('  /// <summary>%s</summary>\n' % name)
                        z3consts.write('  public enum %s {\n' % name)
                        z3consts.write
                        for k in decls:
                            i = decls[k]
                            z3consts.write('  %s = %s,\n' % (k, i))
                        z3consts.write('  }\n\n')
                    mode = SEARCHING
                elif len(words) <= 2:
                    assert False, "Invalid %s, line: %s" % (api_file, linenum)
                else:
                    if words[2] != '':
                        if len(words[2]) > 1 and words[2][1] == 'x':
                            idx = int(words[2], 16)
                        else:
                            idx = int(words[2])
                    decls[words[1]] = idx
                    idx = idx + 1
                linenum = linenum + 1
        api.close()
    z3consts.write('}\n');
    z3consts.close()
        full_path_api_files.append(api_file)
    generated_file = mk_genfile_common.mk_z3consts_dotnet_internal(full_path_api_files, dotnet.src_dir)
    if VERBOSE:
        print("Generated '%s'" % os.path.join(dotnet.src_dir, 'Enumerations.cs'))

        print("Generated '{}'".format(generated_file))

# Extract enumeration types from z3_api.h, and add Java definitions
def mk_z3consts_java(api_files):
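
For orientation, a runnable sketch of the C# fragment the deleted writer above emitted for one enum; the enum name and members are invented, and the real generator now lives in mk_genfile_common.mk_z3consts_dotnet_internal:

# Minimal sketch, not part of the commit; enum name and members are invented.
name = 'Z3_example_kind'
decls = {'Z3_EXAMPLE_A': 0, 'Z3_EXAMPLE_B': 1}
lines = ['  /// <summary>%s</summary>' % name, '  public enum %s {' % name]
lines += ['  %s = %s,' % (k, v) for k, v in decls.items()]
lines.append('  }')
print('\n'.join(lines))   # same shape as one enum block written into Enumerations.cs
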
@@ -3777,7 +3333,7 @@ class MakeRuleCmd(object):
        cls.write_cmd(out, "mkdir -p {install_root}{dir}".format(
            install_root=install_root,
            dir=dir))

    @classmethod
    def _is_path_prefix_of(cls, temp_path, target_as_abs):
        """
@@ -3906,4 +3462,3 @@ def configure_file(template_file_path, output_file_path, substitutions):
if __name__ == '__main__':
    import doctest
    doctest.testmod()

@@ -4,39 +4,33 @@ Reads a pyg file and emits the corresponding
C++ header file into the specified destination
directory.
"""
import mk_util
import mk_genfile_common
import argparse
import logging
import os
import sys

def main(args):
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("pyg_file", help="pyg file")
    parser.add_argument("destination_dir", help="destination directory")
    pargs = parser.parse_args(args)
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("pyg_file", help="pyg file")
    parser.add_argument("destination_dir", help="destination directory")
    pargs = parser.parse_args(args)

    if not os.path.exists(pargs.pyg_file):
        logging.error('"{}" does not exist'.format(pargs.pyg_file))
        return 1
    if not os.path.exists(pargs.pyg_file):
        logging.error('"{}" does not exist'.format(pargs.pyg_file))
        return 1

    if not os.path.exists(pargs.destination_dir):
        logging.error('"{}" does not exist'.format(pargs.destination_dir))
        return 1
    if not mk_genfile_common.check_dir_exists(pargs.destination_dir):
        return 1

    if not os.path.isdir(pargs.destination_dir):
        logging.error('"{}" is not a directory'.format(pargs.destination_dir))
        return 1

    pyg_full_path = os.path.abspath(pargs.pyg_file)
    destination_dir_full_path = os.path.abspath(pargs.destination_dir)
    logging.info('Using {}'.format(pyg_full_path))
    # Use the existing infrastructure to do this
    mk_util.CURRENT_PYG_HPP_DEST_DIR = destination_dir_full_path
    mk_util._execfile(pyg_full_path, mk_util.PYG_GLOBALS)
    return 0
    pyg_full_path = os.path.abspath(pargs.pyg_file)
    destination_dir_full_path = os.path.abspath(pargs.destination_dir)
    logging.info('Using {}'.format(pyg_full_path))
    output = mk_genfile_common.mk_hpp_from_pyg(pyg_full_path, destination_dir_full_path)
    logging.info('Generated "{}"'.format(output))
    return 0

if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
    sys.exit(main(sys.argv[1:]))
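
After this change the pyg-to-hpp conversion can also be driven programmatically through the shared helper rather than via mk_util's globals; a minimal sketch, with placeholder paths and assuming mk_genfile_common is on the Python path:

# Minimal sketch, not part of the commit; both paths are placeholders.
import mk_genfile_common

output = mk_genfile_common.mk_hpp_from_pyg('/path/to/example_params.pyg', '/path/to/output_dir')
print('Generated "{}"'.format(output))   # returned path is the header emitted into output_dir
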