
Merge branch 'master' into sls

Nikolaj Bjorner 2024-10-11 09:56:46 -07:00
commit 5d9d134151
406 changed files with 14324 additions and 8359 deletions

View file

@ -3,6 +3,8 @@ name: Code Coverage
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
schedule:
- cron: "0 11 * * *"

View file

@ -41,7 +41,7 @@ jobs:
type=edge
type=sha,prefix=ubuntu-20.04-bare-z3-sha-
- name: Build and push Bare Z3 Docker Image
uses: docker/build-push-action@v6.3.0
uses: docker/build-push-action@v6.9.0
with:
context: .
push: true

View file

@ -0,0 +1,23 @@
name: MSVC Clang-CL Static Build
on:
push:
pull_request:
permissions:
contents: read # to fetch code (actions/checkout)
jobs:
build:
runs-on: windows-2019
env:
BUILD_TYPE: Release
steps:
- name: Checkout Repo
uses: actions/checkout@v4
- name: Build
run: |
cmake -B build -DCMAKE_BUILD_TYPE=${{ env.BUILD_TYPE }} -DZ3_BUILD_LIBZ3_SHARED=OFF -DZ3_BUILD_LIBZ3_MSVC_STATIC=ON -T ClangCL -DCMAKE_C_FLAGS="/EHsc" -DCMAKE_CXX_FLAGS="/EHsc"
cmake --build build --config ${{ env.BUILD_TYPE }} --parallel

.gitignore vendored
View file

@ -8,6 +8,11 @@ callgrind.out.*
*.hpp
.env
.z3-trace
.env
.genaiscript
package-lock.json
package.json
node_modules
# OCaml generated files
*.a
*.o

.gitignore.genai Normal file
View file

@ -0,0 +1,3 @@
**/genaiscript.d.ts
**/package-lock.json
**/yarn.lock

View file

@ -2,7 +2,7 @@
cmake_minimum_required(VERSION 3.16)
set(CMAKE_USER_MAKE_RULES_OVERRIDE_CXX "${CMAKE_CURRENT_SOURCE_DIR}/cmake/cxx_compiler_flags_overrides.cmake")
project(Z3 VERSION 4.13.1.0 LANGUAGES CXX)
project(Z3 VERSION 4.13.3.0 LANGUAGES CXX)
################################################################################
# Project version
@ -178,7 +178,7 @@ include(${PROJECT_SOURCE_DIR}/cmake/z3_add_cxx_flag.cmake)
################################################################################
# C++ language version
################################################################################
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
################################################################################

View file

@ -163,7 +163,7 @@ See [``examples/ml``](examples/ml) for examples.
### ``Python``
You can install the Python wrapper for Z3 for the latest release from pypi using the command
You can install the Python wrapper for Z3 for the latest release from pypi using the command:
```bash
pip install z3-solver
@ -206,7 +206,7 @@ See [``examples/python``](examples/python) for examples.
### ``Julia``
The Julia package [Z3.jl](https://github.com/ahumenberger/Z3.jl) wraps the C++ API of Z3. Information about updating and building the Julia bindings can be found in [src/api/julia](src/api/julia).
The Julia package [Z3.jl](https://github.com/ahumenberger/Z3.jl) wraps the C API of Z3. A previous version of it wrapped the C++ API; information about updating and building the Julia bindings can be found in [src/api/julia](src/api/julia).
### ``Web Assembly`` / ``TypeScript`` / ``JavaScript``
@ -236,4 +236,7 @@ to Z3's C API. For more information, see [MachineArithmetic/README.md](https://g
* [Julia](https://github.com/ahumenberger/Z3.jl)
* [Smalltalk](https://github.com/shingarov/MachineArithmetic/blob/pure-z3/MachineArithmetic/README.md) (supports Pharo and Smalltalk/X)
## Power Tools
* The [Axiom Profiler](https://github.com/viperproject/axiom-profiler-2) currently developed by ETH Zurich

View file

@ -10,6 +10,31 @@ Version 4.next
- native word level bit-vector solving.
- introduction of simple induction lemmas to handle a limited repertoire of induction proofs.
Version 4.13.2
==============
- Performance regression fix. #7404
Version 4.13.1
==============
- single-sample cell projection in nlsat was designed by Haokun Li and Bican Xia.
- use of the simple checker together with variable ordering, supported by qfnra_tactic, was developed by Mengyu Zhao (Linxi) and Shaowei Cai.
The projection is described in the paper by Haokun Li and Bican Xia, [Solving Satisfiability of Polynomial Formulas By Sample - Cell Projection](https://arxiv.org/abs/2003.00409). The code is ported from https://github.com/hybridSMT/hybridSMT.git
- Add an API for providing hints to the solver/optimize contexts about which initial values to attempt for variables.
The new API functions are Z3_solver_set_initial_value and Z3_optimize_set_initial_value, respectively. Supply these functions with a Boolean or numeric variable and a value. The solver will then attempt to use these values in the initial phase of search. The feature is aimed at solving nearly similar problems, or problems with a predicted model; the intent is that restarting the solver from a near solution helps prune the space of constraints that are initially infeasible.
The SMTLIB front-end contains the new command (set-initial-value var value). For example,
(declare-const x Int)
(set-initial-value x 10)
(push)
(assert (> x 0))
(check-sat)
(get-model)
produces a model where x = 10. We use (push) to ensure that z3 doesn't run a
specialized pre-processor that eliminates x, which would render the initialization
without effect.
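For comparison, here is a minimal sketch of the same scenario through the C++ API added in this release (the new solver::set_initial_value overloads in z3++.h); the exact output depends on the solver configuration:
```cpp
// Minimal sketch using the new solver::set_initial_value overloads from z3++.h.
// Build e.g. with: g++ -std=c++20 init_value.cpp -lz3
#include <iostream>
#include "z3++.h"

int main() {
    z3::context c;
    z3::solver s(c);
    z3::expr x = c.int_const("x");
    s.set_initial_value(x, 10);   // hint: try x = 10 in the initial phase
    s.push();                     // mirrors the (push) in the SMT-LIB example
    s.add(x > 0);
    if (s.check() == z3::sat)
        std::cout << s.get_model() << "\n";   // expected to assign x = 10
    return 0;
}
```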
Version 4.13.0
==============
- add ARM64 wheels for Python, thanks to Steven Moy, smoy

View file

@ -43,8 +43,20 @@ jobs:
- ${{if eq(variables['runRegressions'], 'True')}}:
- template: scripts/test-regressions.yml
- job: LinuxBuildsArm64
displayName: "ManyLinux ARM64 build"
- job: "ManylinuxPythonBuildAmd64"
displayName: "Python bindings (manylinux Centos AMD64) build"
pool:
vmImage: "ubuntu-latest"
container: "quay.io/pypa/manylinux2014_x86_64:latest"
steps:
- script: "/opt/python/cp38-cp38/bin/python -m venv $PWD/env"
- script: 'echo "##vso[task.prependpath]$PWD/env/bin"'
- script: "pip install build git+https://github.com/rhelmot/auditwheel"
- script: "cd src/api/python && python -m build && AUDITWHEEL_PLAT= auditwheel repair --best-plat dist/*.whl && cd ../../.."
- script: "pip install ./src/api/python/wheelhouse/*.whl && python - <src/api/python/z3test.py z3 && python - <src/api/python/z3test.py z3num"
- job: ManyLinuxPythonBuildArm64
displayName: "Python bindings (manylinux Centos ARM64 cross) build"
variables:
name: ManyLinux
python: "/opt/python/cp37-cp37m/bin/python"
@ -55,22 +67,19 @@ jobs:
- script: curl -L -o /tmp/arm-toolchain.tar.xz 'https://developer.arm.com/-/media/Files/downloads/gnu/11.2-2022.02/binrel/gcc-arm-11.2-2022.02-x86_64-aarch64-none-linux-gnu.tar.xz?rev=33c6e30e5ac64e6dba8f0431f2c35f1b&hash=9918A05BF47621B632C7A5C8D2BB438FB80A4480'
- script: mkdir -p /tmp/arm-toolchain/
- script: tar xf /tmp/arm-toolchain.tar.xz -C /tmp/arm-toolchain/ --strip-components=1
- script: "/opt/python/cp38-cp38/bin/python -m venv $PWD/env"
- script: 'echo "##vso[task.prependpath]$PWD/env/bin"'
- script: echo '##vso[task.prependpath]/tmp/arm-toolchain/bin'
- script: echo '##vso[task.prependpath]/tmp/arm-toolchain/aarch64-none-linux-gnu/libc/usr/bin'
- script: echo $PATH
- script: stat /tmp/arm-toolchain/bin/aarch64-none-linux-gnu-gcc
- task: PythonScript@0
displayName: Build
inputs:
scriptSource: 'filepath'
scriptPath: scripts/mk_unix_dist.py
arguments: --nodotnet --nojava --arch=arm64
pythonInterpreter: $(python)
- script: "stat `which aarch64-none-linux-gnu-gcc`"
- script: "pip install build git+https://github.com/rhelmot/auditwheel"
- script: "cd src/api/python && CC=aarch64-none-linux-gnu-gcc CXX=aarch64-none-linux-gnu-g++ AR=aarch64-none-linux-gnu-ar LD=aarch64-none-linux-gnu-ld python -m build && AUDITWHEEL_PLAT= auditwheel repair --best-plat dist/*.whl && cd ../../.."
- job: "Ubuntu20OCaml"
displayName: "Ubuntu 20 with OCaml"
- job: "UbuntuOCaml"
displayName: "Ubuntu with OCaml"
pool:
vmImage: "Ubuntu-20.04"
vmImage: "Ubuntu-latest"
steps:
- script: sudo apt-get install ocaml opam libgmp-dev
- script: opam init -y
@ -90,10 +99,10 @@ jobs:
- template: scripts/generate-doc.yml
- job: "Ubuntu20OCamlStatic"
displayName: "Ubuntu 20 with OCaml on z3-static"
- job: "UbuntuOCamlStatic"
displayName: "Ubuntu with OCaml on z3-static"
pool:
vmImage: "Ubuntu-20.04"
vmImage: "Ubuntu-latest"
steps:
- script: sudo apt-get install ocaml opam libgmp-dev
- script: opam init -y
@ -236,7 +245,7 @@ jobs:
vmImage: "macOS-latest"
steps:
- script: brew install ninja
# - script: brew install --cask julia
- script: brew install --cask julia
- script: |
julia -e "using Pkg; Pkg.add(PackageSpec(name=\"libcxxwrap_julia_jll\"))"
JlCxxDir=$(julia -e "using libcxxwrap_julia_jll; println(joinpath(dirname(libcxxwrap_julia_jll.libcxxwrap_julia_path), \"cmake\", \"JlCxx\"))")

View file

@ -165,6 +165,15 @@ Z3_ast mk_int_var(Z3_context ctx, const char * name)
return mk_var(ctx, name, ty);
}
/**
\brief Create a string variable using the given name.
*/
Z3_ast mk_string_var(Z3_context ctx, const char * name)
{
Z3_sort ty = Z3_mk_string_sort(ctx);
return mk_var(ctx, name, ty);
}
/**
\brief Create a Z3 integer node using a C int.
*/
@ -1615,7 +1624,7 @@ void error_code_example2() {
Z3_del_config(cfg);
x = mk_int_var(ctx, "x");
y = mk_bool_var(ctx, "y");
y = mk_string_var(ctx, "y");
printf("before Z3_mk_iff\n");
/* the next call will produce an error */
app = Z3_mk_iff(ctx, x, y);

genaisrc/.gitattributes vendored Normal file
View file

@ -0,0 +1 @@
genaiscript.d.ts -diff merge=ours linguist-generated

View file

@ -0,0 +1,21 @@
def("FILE", env.files)
def("ERR", "/home/nbjorner/z3/src/nlsat/nlsat_simple_checker.cpp: In member function bool nlsat::simple_checker::imp::Endpoint::operator==(const nlsat::simple_checker::imp::Endpoint&) const:\
/home/nbjorner/z3/src/nlsat/nlsat_simple_checker.cpp:63:82: warning: C++20 says that these are ambiguous, even though the second is reversed:\
63 | if (!m_inf && !rhs.m_inf && m_open == rhs.m_open && m_val == rhs.m_val) {\
| ^~~~~\
In file included from /home/nbjorner/z3/src/util/mpz.h:26,\
from /home/nbjorner/z3/src/util/mpq.h:21,\
from /home/nbjorner/z3/src/util/rational.h:21,\
from /home/nbjorner/z3/src/math/polynomial/algebraic_numbers.h:21,\
from /home/nbjorner/z3/src/nlsat/nlsat_simple_checker.h:20,\
from /home/nbjorner/z3/src/nlsat/nlsat_simple_checker.cpp:1:\
/home/nbjorner/z3/src/util/scoped_numeral.h:96:17: note: candidate 1: bool operator==(const _scoped_numeral<algebraic_numbers::manager>&, const _scoped_numeral<algebraic_numbers::manager>::numeral&)\
96 | friend bool operator==(_scoped_numeral const & a, numeral const & b) {\
| ^~~~~~~~\
/home/nbjorner/z3/src/util/scoped_numeral.h:96:17: note: candidate 2: bool operator==(const _scoped_numeral<algebraic_numbers::manager>&, const _scoped_numeral<algebraic_numbers::manager>::numeral&) (reversed)")
$`You are an expert C++ programmer.
Your task is to fix the compilation bug reported in the error message ERR.
How should FILE be changed to fix the error message?`
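For context, the ERR text embedded above is GCC's C++20 "ambiguous reversed operator==" diagnostic: since C++20, a declared operator== is also considered with its arguments reversed, so a mixed-type comparison plus an implicit conversion can make a same-type comparison ambiguous. Below is a self-contained sketch of the pattern and one possible fix; the types are hypothetical stand-ins, not the Z3 sources:
```cpp
// Hypothetical stand-ins for numeral / _scoped_numeral; not the Z3 sources.
#include <iostream>

struct numeral { int v; };

struct scoped_numeral {
    numeral m_num;
    operator numeral const &() const { return m_num; }  // implicit conversion

    // With only this mixed-type comparison, C++20 also synthesizes the reversed
    // candidate operator==(numeral, scoped_numeral); for scoped == scoped both
    // candidates need one conversion, so the call is ambiguous (GCC warns).
    friend bool operator==(scoped_numeral const &a, numeral const &b) {
        return a.m_num.v == b.v;
    }

    // One possible fix: add the symmetric overload so scoped == scoped has an
    // exact match and the reversed candidate is no longer needed.
    friend bool operator==(scoped_numeral const &a, scoped_numeral const &b) {
        return a.m_num.v == b.m_num.v;
    }
};

int main() {
    scoped_numeral a{{1}}, b{{1}};
    std::cout << std::boolalpha << (a == b) << '\n';  // unambiguous with the fix
}
```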

genaisrc/gai.genai.mts Normal file
View file

@ -0,0 +1,17 @@
script({
tools: ["agent_fs", "agent_git", "agent_github"],
})
const {
workflow = "latest failed",
failure_run_id = "latest",
branch = await git.defaultBranch(),
} = env.vars
$`Investigate the status of the ${workflow} workflow and identify the root cause of the failure of run ${failure_run_id} in branch ${branch}.
- Correlate the failure with the relevant commits, pull requests or issues.
- Compare the source code between the failed run commit and the last successful run commit before that run.
In your report, include html links to the relevant runs, commits, pull requests or issues.
`

View file

@ -1,81 +1,75 @@
import { select, input, confirm } from "@inquirer/prompts"
/**
* git commit flow with auto-generated commit message
*/
script({
title: "git commit message",
description: "Generate a commit message for all staged changes",
})
// Check for staged changes and stage all changes if none are staged
let diff = await host.exec("git", ["diff", "--cached"])
if (!diff.stdout) {
const stage = await confirm({
message: "No staged changes. Stage all changes?",
default: true,
})
if (stage) {
await host.exec("git", ["add", "."])
diff = await host.exec("git", [
"diff",
"--cached",
"--",
".",
":!**/genaiscript.d.ts",
])
}
if (!diff.stdout) cancel("no staged changes")
}
const diff = await git.diff({
staged: true,
excludedPaths: "**/genaiscript.d.ts",
askStageOnEmpty: true,
})
if (!diff) cancel("no staged changes")
console.log(diff.stdout)
// show diff in the console
console.log(diff)
let choice
let message
do {
// Generate commit message
message = (
await runPrompt(
(_) => {
_.def("GIT_DIFF", diff, { maxTokens: 20000 })
_.$`GIT_DIFF is a diff of all staged changes, coming from the command:
const res = await runPrompt(
(_) => {
_.def("GIT_DIFF", diff, { maxTokens: 20000 })
_.$`GIT_DIFF is a diff of all staged changes, coming from the command:
\`\`\`
git diff --cached
\`\`\`
Please generate a concise, one-line commit message for these changes.
- do NOT add quotes`
},
{ cache: false, temperature: 0.8 }
)
).text
- do NOT add quotes
` // TODO: add a better prompt
},
{ cache: false, temperature: 0.8 }
)
if (res.error) throw res.error
message = res.text
if (!message) {
console.log("No message generated, did you configure the LLM model?")
break
}
// Prompt user for commit message
choice = await select({
message,
choices: [
{
name: "commit",
value: "commit",
description: "accept message and commit",
},
{
name: "edit",
value: "edit",
description: "edit message and commit",
},
{
name: "regenerate",
value: "regenerate",
description: "regenerate message",
},
],
})
choice = await host.select(message, [
{
value: "commit",
description: "accept message and commit",
},
{
value: "edit",
description: "edit message and commit",
},
{
value: "regenerate",
description: "regenerate message",
},
])
// Handle user choice
if (choice === "edit") {
message = await input({
message: "Edit commit message",
message = await host.input("Edit commit message", {
required: true,
})
choice = "commit"
}
// Regenerate message
if (choice === "commit" && message) {
console.log((await host.exec("git", ["commit", "-m", message])).stdout)
if (await confirm({ message: "Push changes?", default: true }))
console.log((await host.exec("git", ["push"])).stdout)
console.log(await git.exec(["commit", "-m", message]))
if (await host.confirm("Push changes?", { default: true }))
console.log(await git.exec("push"))
break
}
} while (choice !== "commit")

genaisrc/genaiscript.d.ts generated vendored Normal file

Binary file not shown.

genaisrc/tsconfig.json Normal file
View file

@ -0,0 +1,21 @@
{
"compilerOptions": {
"lib": [
"ES2022"
],
"target": "ES2023",
"module": "NodeNext",
"moduleDetection": "force",
"moduleResolution": "nodenext",
"checkJs": true,
"allowJs": true,
"skipLibCheck": true,
"noEmit": true,
"allowImportingTsExtensions": true
},
"include": [
"*.mjs",
"*.mts",
"./genaiscript.d.ts"
]
}

View file

@ -8,7 +8,7 @@
from mk_util import *
def init_version():
set_version(4, 13, 1, 0) # express a default build version or pick up ci build version
set_version(4, 13, 3, 0) # express a default build version or pick up ci build version
# Z3 Project definition
def init_project_def():

View file

@ -2007,11 +2007,11 @@ class MLComponent(Component):
src_dir = self.to_src_dir
mk_dir(os.path.join(BUILD_DIR, self.sub_dir))
api_src = get_component(API_COMPONENT).to_src_dir
# remove /GL and -std=c++17; the ocaml tools don't like them.
# remove /GL and -std=c++20; the ocaml tools don't like them.
if IS_WINDOWS:
out.write('CXXFLAGS_OCAML=$(CXXFLAGS:/GL=)\n')
else:
out.write('CXXFLAGS_OCAML=$(subst -std=c++17,,$(CXXFLAGS))\n')
out.write('CXXFLAGS_OCAML=$(subst -std=c++20,,$(CXXFLAGS))\n')
substitutions = { 'VERSION': "{}.{}.{}.{}".format(VER_MAJOR, VER_MINOR, VER_BUILD, VER_TWEAK) }
@ -2500,7 +2500,7 @@ def mk_config():
config = open(os.path.join(BUILD_DIR, 'config.mk'), 'w')
global CXX, CC, GMP, GUARD_CF, STATIC_BIN, GIT_HASH, CPPFLAGS, CXXFLAGS, LDFLAGS, EXAMP_DEBUG_FLAG, FPMATH_FLAGS, LOG_SYNC, SINGLE_THREADED, IS_ARCH_ARM64
if IS_WINDOWS:
CXXFLAGS = '/nologo /Zi /D WIN32 /D _WINDOWS /EHsc /GS /Gd /std:c++17'
CXXFLAGS = '/nologo /Zi /D WIN32 /D _WINDOWS /EHsc /GS /Gd /std:c++20'
config.write(
'CC=cl\n'
'CXX=cl\n'
@ -2616,7 +2616,7 @@ def mk_config():
CPPFLAGS = '%s -D_MP_INTERNAL' % CPPFLAGS
if GIT_HASH:
CPPFLAGS = '%s -DZ3GITHASH=%s' % (CPPFLAGS, GIT_HASH)
CXXFLAGS = '%s -std=c++17' % CXXFLAGS
CXXFLAGS = '%s -std=c++20' % CXXFLAGS
CXXFLAGS = '%s -fvisibility=hidden -fvisibility-inlines-hidden -c' % CXXFLAGS
FPMATH = test_fpmath(CXX)
CXXFLAGS = '%s %s' % (CXXFLAGS, FPMATH_FLAGS)
@ -2699,7 +2699,7 @@ def mk_config():
config.write('CC=%s\n' % CC)
config.write('CXX=%s\n' % CXX)
config.write('CXXFLAGS=%s %s\n' % (CPPFLAGS, CXXFLAGS))
config.write('CFLAGS=%s %s\n' % (CPPFLAGS, CXXFLAGS.replace('-std=c++17', '')))
config.write('CFLAGS=%s %s\n' % (CPPFLAGS, CXXFLAGS.replace('-std=c++20', '')))
config.write('EXAMP_DEBUG_FLAG=%s\n' % EXAMP_DEBUG_FLAG)
config.write('CXX_OUT_FLAG=-o \n')
config.write('C_OUT_FLAG=-o \n')

View file

@ -375,10 +375,11 @@ def cp_into_bin(arch):
os.path.join(bin_dir, "libz3.lib"))
shutil.rmtree(lib_dir)
if JAVA_ENABLED:
java_dir = get_java_dist_path(arch)
shutil.copytree(java_dir,
bin_dir,
dirs_exist_ok=True)
java_dir = os.path.join(bin_dir, "java")
for file in os.listdir(java_dir):
src_path = os.path.join(java_dir, file)
dst_path = os.path.join(bin_dir, file)
shutil.copy2(src_path, dst_path)
shutil.rmtree(java_dir)
def cp_pdb(arch):

View file

@ -1,7 +1,7 @@
variables:
Major: '4'
Minor: '13'
Patch: '1'
Patch: '3'
ReleaseVersion: $(Major).$(Minor).$(Patch)
AssemblyVersion: $(Major).$(Minor).$(Patch).$(Build.BuildId)
NightlyVersion: $(AssemblyVersion)-$(Build.buildId)
@ -10,7 +10,7 @@ stages:
- stage: Build
jobs:
- job: MacBuild
displayName: "macOS Build"
displayName: "Mac Build"
pool:
vmImage: "macOS-latest"
steps:
@ -35,7 +35,7 @@ stages:
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'macOSBuild'
artifactName: 'macOsBuild'
targetPath: $(Build.ArtifactStagingDirectory)
- job: MacBuildArm64
@ -80,35 +80,6 @@ stages:
artifactName: 'UbuntuBuild'
targetPath: $(Build.ArtifactStagingDirectory)
- job: UbuntuBuild20
displayName: "Ubuntu build 20"
pool:
vmImage: "ubuntu-20.04"
steps:
- task: PythonScript@0
displayName: Build
inputs:
scriptSource: 'filepath'
scriptPath: scripts/mk_unix_dist.py
arguments: --dotnet-key=$(Build.SourcesDirectory)/resources/z3.snk
- script: git clone https://github.com/z3prover/z3test z3test
displayName: 'Clone z3test'
- task: PythonScript@0
displayName: Test
inputs:
scriptSource: 'filepath'
scriptPath: z3test/scripts/test_benchmarks.py
arguments: build-dist/z3 z3test/regressions/smt2
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'UbuntuBuild20'
targetPath: $(Build.ArtifactStagingDirectory)
- job: UbuntuArm64
displayName: "Ubuntu ARM64 build"
pool:
@ -121,7 +92,7 @@ stages:
- script: echo '##vso[task.prependpath]/tmp/arm-toolchain/aarch64-none-linux-gnu/libc/usr/bin'
- script: echo $PATH
- script: stat /tmp/arm-toolchain/bin/aarch64-none-linux-gnu-gcc
- script: python scripts/mk_unix_dist.py --nodotnet --nojava --arch=arm64
- script: python scripts/mk_unix_dist.py --nodotnet --arch=arm64
- task: CopyFiles@2
inputs:
sourceFolder: dist
@ -167,74 +138,59 @@ stages:
inputs:
artifactName: 'UbuntuDoc'
targetPath: $(Build.ArtifactStagingDirectory)
- job: LinuxBuilds
displayName: "ManyLinux build"
variables:
name: ManyLinux
python: "/opt/python/cp37-cp37m/bin/python"
# TODO copy artifacts
- job: "ManylinuxPythonBuildAmd64"
displayName: "Python bindings (manylinux Centos AMD64) build"
pool:
vmImage: "ubuntu-latest"
container: "quay.io/pypa/manylinux_2_28_x86_64:latest"
container: "quay.io/pypa/manylinux2014_x86_64:latest"
steps:
- task: PythonScript@0
displayName: Build
inputs:
scriptSource: 'filepath'
scriptPath: scripts/mk_unix_dist.py
arguments: --nodotnet --nojava
pythonInterpreter: $(python)
- script: git clone https://github.com/z3prover/z3test z3test
displayName: 'Clone z3test'
- task: PythonScript@0
displayName: Test
inputs:
scriptSource: 'filepath'
scriptPath: z3test/scripts/test_benchmarks.py
arguments: build-dist/z3 z3test/regressions/smt2
pythonInterpreter: $(python)
- script: "/opt/python/cp38-cp38/bin/python -m venv $PWD/env"
- script: 'echo "##vso[task.prependpath]$PWD/env/bin"'
- script: "pip install build git+https://github.com/rhelmot/auditwheel" # @TODO remove when patches make it upstream
- script: "cd src/api/python && python -m build && AUDITWHEEL_PLAT= auditwheel repair --best-plat dist/*.whl && cd ../../.."
- script: "pip install ./src/api/python/wheelhouse/*.whl && python - <src/api/python/z3test.py z3 && python - <src/api/python/z3test.py z3num"
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
sourceFolder: src/api/python/wheelhouse
contents: '*.whl'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'ManyLinuxBuild'
targetPath: $(Build.ArtifactStagingDirectory)
- job: LinuxBuildsArm64
displayName: "ManyLinux ARM64 build"
artifactName: 'ManyLinuxPythonBuildAMD64'
targetPath: $(Build.ArtifactStagingDirectory)
- job: ManyLinuxPythonBuildArm64
displayName: "Python bindings (manylinux Centos ARM64 cross) build"
variables:
name: ManyLinux
python: "/opt/python/cp37-cp37m/bin/python"
pool:
vmImage: "ubuntu-latest"
container: "quay.io/pypa/manylinux_2_28_x86_64:latest"
container: "quay.io/pypa/manylinux2014_x86_64:latest"
steps:
- script: curl -L -o /tmp/arm-toolchain.tar.xz 'https://developer.arm.com/-/media/Files/downloads/gnu/11.2-2022.02/binrel/gcc-arm-11.2-2022.02-x86_64-aarch64-none-linux-gnu.tar.xz?rev=33c6e30e5ac64e6dba8f0431f2c35f1b&hash=9918A05BF47621B632C7A5C8D2BB438FB80A4480'
- script: mkdir -p /tmp/arm-toolchain/
- script: tar xf /tmp/arm-toolchain.tar.xz -C /tmp/arm-toolchain/ --strip-components=1
- script: "/opt/python/cp38-cp38/bin/python -m venv $PWD/env"
- script: 'echo "##vso[task.prependpath]$PWD/env/bin"'
- script: echo '##vso[task.prependpath]/tmp/arm-toolchain/bin'
- script: echo '##vso[task.prependpath]/tmp/arm-toolchain/aarch64-none-linux-gnu/libc/usr/bin'
- script: echo $PATH
- script: stat /tmp/arm-toolchain/bin/aarch64-none-linux-gnu-gcc
- task: PythonScript@0
displayName: Build
inputs:
scriptSource: 'filepath'
scriptPath: scripts/mk_unix_dist.py
arguments: --nodotnet --nojava --arch=arm64
pythonInterpreter: $(python)
- script: "stat `which aarch64-none-linux-gnu-gcc`"
- script: "pip install build git+https://github.com/rhelmot/auditwheel"
- script: "cd src/api/python && CC=aarch64-none-linux-gnu-gcc CXX=aarch64-none-linux-gnu-g++ AR=aarch64-none-linux-gnu-ar LD=aarch64-none-linux-gnu-ld Z3_CROSS_COMPILING=aarch64 python -m build && AUDITWHEEL_PLAT= auditwheel repair --best-plat dist/*.whl && cd ../../.."
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
sourceFolder: src/api/python/wheelhouse
contents: '*.whl'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'ManyLinuxBuildArm64'
targetPath: $(Build.ArtifactStagingDirectory)
artifactName: 'ManyLinuxPythonBuildArm64'
targetPath: $(Build.ArtifactStagingDirectory)
- template: build-win-signed.yml
parameters:
@ -271,9 +227,9 @@ stages:
artifact: 'WindowsBuild-x64'
path: $(Agent.TempDirectory)\package
- task: DownloadPipelineArtifact@2
displayName: 'Download Ubuntu 20.04 Build'
displayName: 'Download Ubuntu Build'
inputs:
artifact: 'UbuntuBuild20'
artifact: 'UbuntuBuild'
path: $(Agent.TempDirectory)\package
- task: DownloadPipelineArtifact@2
displayName: 'Download Ubuntu ARM64 Build'
@ -425,6 +381,7 @@ stages:
patchVersion: $(Patch)
arguments: 'pack $(Agent.TempDirectory)\package\out\Microsoft.Z3.x86.sym.nuspec -Version $(NightlyVersion) -OutputDirectory $(Build.ArtifactStagingDirectory) -Verbosity detailed -Symbols -SymbolPackageFormat snupkg -BasePath $(Agent.TempDirectory)\package\out'
- task: EsrpCodeSigning@2
condition: eq(1,0)
continueOnError: true
displayName: 'Sign Package'
inputs:
@ -453,6 +410,7 @@ stages:
MaxConcurrency: '50'
MaxRetryAttempts: '5'
- task: EsrpCodeSigning@2
condition: eq(1,0)
continueOnError: true
displayName: 'Sign Symbol Package'
inputs:
@ -492,46 +450,49 @@ stages:
vmImage: "ubuntu-latest"
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifactName: 'WindowsBuild-x86'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
inputs:
artifactName: 'WindowsBuild-x64'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
inputs:
artifactName: 'ManyLinuxBuild'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
inputs:
artifactName: 'ManyLinuxBuildArm64'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Build'
inputs:
artifactName: 'macOsBuild'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Arm64 Build'
inputs:
artifactName: 'MacArm64'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download Win64 Build'
inputs:
artifactName: 'WindowsBuild-x64'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download Win32 Build'
inputs:
artifactName: 'WindowsBuild-x86'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download ManyLinux Build'
inputs:
artifactName: 'ManyLinuxPythonBuildAMD64'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download ManyLinux Arm64 Build'
inputs:
artifactName: 'ManyLinuxPythonBuildArm64'
targetPath: $(Agent.TempDirectory)
- script: cd $(Agent.TempDirectory); mkdir osx-x64-bin; cd osx-x64-bin; unzip ../*x64-osx*.zip
- script: cd $(Agent.TempDirectory); mkdir osx-arm64-bin; cd osx-arm64-bin; unzip ../*arm64-osx*.zip
- script: cd $(Agent.TempDirectory); mkdir libc-x64-bin; cd libc-x64-bin; unzip ../*x64-glibc*.zip
- script: cd $(Agent.TempDirectory); mkdir libc-arm64-bin; cd libc-arm64-bin; unzip ../*arm64-glibc*.zip
# - script: cd $(Agent.TempDirectory); mkdir musl-bin; cd musl-bin; unzip ../*-linux.zip
- script: cd $(Agent.TempDirectory); mkdir win32-bin; cd win32-bin; unzip ../*x86-win*.zip
- script: cd $(Agent.TempDirectory); mkdir win64-bin; cd win64-bin; unzip ../*x64-win*.zip
- script: python3 -m pip install --user -U setuptools wheel
- script: python3 -m pip install --user -U setuptools
- script: cd src/api/python; python3 setup.py sdist
# take a look at this PREMIUM HACK I came up with to get around the fact that the azure variable syntax overloads the bash syntax for subshells
- script: cd src/api/python; echo $(Agent.TempDirectory)/libc-x64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/libc-arm64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
# - script: cd src/api/python; echo $(Agent.TempDirectory)/musl-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win32-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/osx-x64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/osx-arm64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cp $(Agent.TempDirectory)/*.whl src/api/python/dist
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Python packages'
@ -577,9 +538,9 @@ stages:
artifactName: 'UbuntuArm64'
targetPath: tmp
- task: DownloadPipelineArtifact@2
displayName: "Download Ubuntu-20.04"
displayName: "Download Ubuntu"
inputs:
artifactName: 'UbuntuBuild20'
artifactName: 'UbuntuBuild'
targetPath: tmp
- task: DownloadPipelineArtifact@2
displayName: "Download Doc"

View file

@ -6,7 +6,7 @@
trigger: none
variables:
ReleaseVersion: '4.13.1'
ReleaseVersion: '4.13.3'
stages:
@ -15,9 +15,9 @@ stages:
jobs:
- job: MacBuild
displayName: "macOS Build"
displayName: "Mac Build"
pool:
vmImage: "macOS-11"
vmImage: "macOS-latest"
steps:
- task: PythonScript@0
displayName: Build
@ -40,13 +40,13 @@ stages:
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'macOSBuild'
artifactName: 'macOsBuild'
targetPath: $(Build.ArtifactStagingDirectory)
- job: MacBuildArm64
displayName: "macOS ARM64 Build"
displayName: "Mac ARM64 Build"
pool:
vmImage: "macOS-11"
vmImage: "macOS-latest"
steps:
- script: python scripts/mk_unix_dist.py --dotnet-key=$(Build.SourcesDirectory)/resources/z3.snk --arch=arm64 --os=osx-11.0
- script: git clone https://github.com/z3prover/z3test z3test
@ -85,41 +85,13 @@ stages:
artifactName: 'UbuntuBuild'
targetPath: $(Build.ArtifactStagingDirectory)
- job: UbuntuBuild20
displayName: "Ubuntu build 20"
pool:
vmImage: "ubuntu-20.04"
steps:
- task: PythonScript@0
displayName: Build
inputs:
scriptSource: 'filepath'
scriptPath: scripts/mk_unix_dist.py
arguments: --dotnet-key=$(Build.SourcesDirectory)/resources/z3.snk
- script: git clone https://github.com/z3prover/z3test z3test
displayName: 'Clone z3test'
- task: PythonScript@0
displayName: Test
inputs:
scriptSource: 'filepath'
scriptPath: z3test/scripts/test_benchmarks.py
arguments: build-dist/z3 z3test/regressions/smt2
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'UbuntuBuild20'
targetPath: $(Build.ArtifactStagingDirectory)
- job: UbuntuArm64
displayName: "Ubuntu ARM64 build"
pool:
vmImage: "ubuntu-latest"
steps:
- script: curl -L -o /tmp/arm-toolchain.tar.xz 'https://developer.arm.com/-/media/Files/downloads/gnu/11.2-2022.02/binrel/gcc-arm-11.2-2022.02-x86_64-aarch64-none-linux-gnu.tar.xz?rev=33c6e30e5ac64e6dba8f0431f2c35f1b&hash=9918A05BF47621B632C7A5C8D2BB438FB80A4480'
- script: curl -L -o /tmp/arm-toolchain.tar.xz 'https://developer.arm.com/-/media/Files/downloads/gnu/11.2-2022.02/binrel/gcc-arm-11.2-2022.02-x86_64-aarch64-none-linux-gnu.tar.xz?rev=33c6e30e5ac64e6dba8f0431f2c35f1b&hash=9918A05BF47621B632C7A5C8D2BB438FB80A4480'
- script: mkdir -p /tmp/arm-toolchain/
- script: tar xf /tmp/arm-toolchain.tar.xz -C /tmp/arm-toolchain/ --strip-components=1
- script: echo '##vso[task.prependpath]/tmp/arm-toolchain/bin'
@ -173,43 +145,29 @@ stages:
artifactName: 'UbuntuDoc'
targetPath: $(Build.ArtifactStagingDirectory)
- job: LinuxBuilds
displayName: "ManyLinux build"
variables:
name: ManyLinux
python: "/opt/python/cp37-cp37m/bin/python"
- job: "ManylinuxPythonBuildAmd64"
displayName: "Python bindings (manylinux Centos AMD64) build"
pool:
vmImage: "ubuntu-latest"
container: "quay.io/pypa/manylinux2014_x86_64:latest"
steps:
- task: PythonScript@0
displayName: Build
inputs:
scriptSource: 'filepath'
scriptPath: scripts/mk_unix_dist.py
arguments: --nodotnet --nojava
pythonInterpreter: $(python)
- script: git clone https://github.com/z3prover/z3test z3test
displayName: 'Clone z3test'
- task: PythonScript@0
displayName: Test
inputs:
scriptSource: 'filepath'
scriptPath: z3test/scripts/test_benchmarks.py
arguments: build-dist/z3 z3test/regressions/smt2
pythonInterpreter: $(python)
- script: "/opt/python/cp38-cp38/bin/python -m venv $PWD/env"
- script: 'echo "##vso[task.prependpath]$PWD/env/bin"'
- script: "pip install build git+https://github.com/rhelmot/auditwheel" # @TODO remove when patches make it upstream
- script: "cd src/api/python && python -m build && AUDITWHEEL_PLAT= auditwheel repair --best-plat dist/*.whl && cd ../../.."
- script: "pip install ./src/api/python/wheelhouse/*.whl && python - <src/api/python/z3test.py z3 && python - <src/api/python/z3test.py z3num"
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
sourceFolder: src/api/python/wheelhouse
contents: '*.whl'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'ManyLinuxBuild'
targetPath: $(Build.ArtifactStagingDirectory)
- job: LinuxBuildsArm64
displayName: "ManyLinux ARM64 build"
artifactName: 'ManyLinuxPythonBuildAMD64'
targetPath: $(Build.ArtifactStagingDirectory)
- job: ManyLinuxPythonBuildArm64
displayName: "Python bindings (manylinux Centos ARM64 cross) build"
variables:
name: ManyLinux
python: "/opt/python/cp37-cp37m/bin/python"
@ -220,26 +178,24 @@ stages:
- script: curl -L -o /tmp/arm-toolchain.tar.xz 'https://developer.arm.com/-/media/Files/downloads/gnu/11.2-2022.02/binrel/gcc-arm-11.2-2022.02-x86_64-aarch64-none-linux-gnu.tar.xz?rev=33c6e30e5ac64e6dba8f0431f2c35f1b&hash=9918A05BF47621B632C7A5C8D2BB438FB80A4480'
- script: mkdir -p /tmp/arm-toolchain/
- script: tar xf /tmp/arm-toolchain.tar.xz -C /tmp/arm-toolchain/ --strip-components=1
- script: "/opt/python/cp38-cp38/bin/python -m venv $PWD/env"
- script: 'echo "##vso[task.prependpath]$PWD/env/bin"'
- script: echo '##vso[task.prependpath]/tmp/arm-toolchain/bin'
- script: echo '##vso[task.prependpath]/tmp/arm-toolchain/aarch64-none-linux-gnu/libc/usr/bin'
- script: echo $PATH
- script: stat /tmp/arm-toolchain/bin/aarch64-none-linux-gnu-gcc
- task: PythonScript@0
displayName: Build
inputs:
scriptSource: 'filepath'
scriptPath: scripts/mk_unix_dist.py
arguments: --nodotnet --nojava --arch=arm64
pythonInterpreter: $(python)
- script: "stat `which aarch64-none-linux-gnu-gcc`"
- script: "pip install build git+https://github.com/rhelmot/auditwheel"
- script: "cd src/api/python && CC=aarch64-none-linux-gnu-gcc CXX=aarch64-none-linux-gnu-g++ AR=aarch64-none-linux-gnu-ar LD=aarch64-none-linux-gnu-ld Z3_CROSS_COMPILING=aarch64 python -m build && AUDITWHEEL_PLAT= auditwheel repair --best-plat dist/*.whl && cd ../../.."
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
sourceFolder: src/api/python/wheelhouse
contents: '*.whl'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'ManyLinuxBuildArm64'
targetPath: $(Build.ArtifactStagingDirectory)
artifactName: 'ManyLinuxPythonBuildArm64'
targetPath: $(Build.ArtifactStagingDirectory)
- template: build-win-signed.yml
parameters:
@ -283,11 +239,6 @@ stages:
inputs:
artifact: 'UbuntuBuild'
path: $(Agent.TempDirectory)\package
- task: DownloadPipelineArtifact@2
displayName: 'Download Ubuntu20 Build'
inputs:
artifact: 'UbuntuBuild20'
path: $(Agent.TempDirectory)\package
- task: DownloadPipelineArtifact@2
displayName: 'Download Ubuntu ARM64 Build'
inputs:
@ -296,8 +247,14 @@ stages:
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Build'
inputs:
artifact: 'macOSBuild'
artifact: 'macOsBuild'
path: $(Agent.TempDirectory)\package
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Arm64 Build'
inputs:
artifact: 'MacArm64'
path: $(Agent.TempDirectory)\package
- task: NuGetToolInstaller@0
inputs:
versionSpec: 5.x
@ -322,6 +279,8 @@ stages:
command: custom
arguments: 'pack $(Agent.TempDirectory)\package\out\Microsoft.Z3.sym.nuspec -OutputDirectory $(Build.ArtifactStagingDirectory) -Verbosity detailed -Symbols -SymbolPackageFormat snupkg -BasePath $(Agent.TempDirectory)\package\out'
- task: EsrpCodeSigning@2
condition: eq(1,0)
continueOnError: true
displayName: 'Sign Package'
inputs:
ConnectedServiceName: 'z3-esrp-signing-2'
@ -350,6 +309,8 @@ stages:
MaxRetryAttempts: '5'
- task: EsrpCodeSigning@2
displayName: 'Sign Symbol Package'
condition: eq(1,0)
continueOnError: true
inputs:
ConnectedServiceName: 'z3-esrp-signing-2'
FolderPath: $(Build.ArtifactStagingDirectory)
@ -421,6 +382,8 @@ stages:
command: custom
arguments: 'pack $(Agent.TempDirectory)\package\out\Microsoft.Z3.x86.sym.nuspec -OutputDirectory $(Build.ArtifactStagingDirectory) -Verbosity detailed -Symbols -SymbolPackageFormat snupkg -BasePath $(Agent.TempDirectory)\package\out'
- task: EsrpCodeSigning@2
condition: eq(1,0)
continueOnError: true
displayName: 'Sign Package'
inputs:
ConnectedServiceName: 'z3-esrp-signing-2'
@ -448,6 +411,8 @@ stages:
MaxConcurrency: '50'
MaxRetryAttempts: '5'
- task: EsrpCodeSigning@2
condition: eq(1,0)
continueOnError: true
displayName: 'Sign Symbol Package'
inputs:
ConnectedServiceName: 'z3-esrp-signing-2'
@ -480,7 +445,7 @@ stages:
artifactName: 'NuGet32'
- job: PythonPackage
- job: Python
displayName: "Python packaging"
pool:
vmImage: "ubuntu-latest"
@ -488,22 +453,12 @@ stages:
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Build'
inputs:
artifact: 'macOSBuild'
artifact: 'macOsBuild'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Arm64 Build'
inputs:
artifact: 'MacArm64'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download ManyLinux Build'
inputs:
artifact: 'ManyLinuxBuild'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download ManyLinux Arm64 Build'
inputs:
artifact: 'ManyLinuxBuildArm64'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download Win32 Build'
@ -514,22 +469,31 @@ stages:
displayName: 'Download Win64 Build'
inputs:
artifact: 'WindowsBuild-x64'
path: $(Agent.TempDirectory)
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download ManyLinux Build'
inputs:
artifactName: 'ManyLinuxPythonBuildAMD64'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download ManyLinux Arm64 Build'
inputs:
artifactName: 'ManyLinuxPythonBuildArm64'
targetPath: $(Agent.TempDirectory)
- script: cd $(Agent.TempDirectory); mkdir osx-x64-bin; cd osx-x64-bin; unzip ../*x64-osx*.zip
- script: cd $(Agent.TempDirectory); mkdir osx-arm64-bin; cd osx-arm64-bin; unzip ../*arm64-osx*.zip
- script: cd $(Agent.TempDirectory); mkdir libc-x64-bin; cd libc-x64-bin; unzip ../*x64-glibc*.zip
- script: cd $(Agent.TempDirectory); mkdir libc-arm64-bin; cd libc-arm64-bin; unzip ../*arm64-glibc*.zip
# - script: cd $(Agent.TempDirectory); mkdir musl-bin; cd musl-bin; unzip ../*-linux.zip
- script: cd $(Agent.TempDirectory); mkdir win32-bin; cd win32-bin; unzip ../*x86-win*.zip
- script: cd $(Agent.TempDirectory); mkdir win64-bin; cd win64-bin; unzip ../*x64-win*.zip
- script: python3 -m pip install --user -U setuptools wheel
- script: python3 -m pip install --user -U setuptools
- script: cd src/api/python; python3 setup.py sdist
# take a look at this PREMIUM HACK I came up with to get around the fact that the azure variable syntax overloads the bash syntax for subshells
- script: cd src/api/python; echo $(Agent.TempDirectory)/osx-x64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/osx-arm64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/libc-x64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/libc-arm64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
# - script: cd src/api/python; echo $(Agent.TempDirectory)/musl-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win32-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/osx-x64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/osx-arm64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cp $(Agent.TempDirectory)/*.whl src/api/python/dist
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'PythonPackage'
@ -545,11 +509,6 @@ stages:
pool:
vmImage: "windows-latest"
steps:
- task: DownloadPipelineArtifact@2
displayName: 'Download Ubuntu Build'
inputs:
artifact: 'UbuntuBuild20'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download Ubuntu Build'
inputs:
@ -568,7 +527,7 @@ stages:
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Build'
inputs:
artifact: 'macOSBuild'
artifact: 'macOsBuild'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download macOSArm64 Build'

View file

@ -60,8 +60,6 @@ class ackr_bound_probe : public probe {
};
public:
ackr_bound_probe() {}
result operator()(goal const & g) override {
proc p(g.m());
unsigned sz = g.size();

View file

@ -41,8 +41,6 @@ public:
, m_ackr_helper(m)
{}
~imp() { }
//
// Returns true iff model was successfully constructed.
// Conflicts are saved as a side effect.

View file

@ -459,6 +459,21 @@ extern "C" {
Z3_CATCH;
}
void Z3_API Z3_optimize_set_initial_value(Z3_context c, Z3_optimize o, Z3_ast var, Z3_ast value) {
Z3_TRY;
LOG_Z3_optimize_set_initial_value(c, o, var, value);
RESET_ERROR_CODE();
if (to_expr(var)->get_sort() != to_expr(value)->get_sort()) {
SET_ERROR_CODE(Z3_INVALID_USAGE, "variable and value should have same sort");
return;
}
ast_manager& m = mk_c(c)->m();
if (!m.is_value(to_expr(value))) {
SET_ERROR_CODE(Z3_INVALID_USAGE, "a proper value was not supplied");
return;
}
to_optimize_ptr(o)->initialize_value(to_expr(var), to_expr(value));
Z3_CATCH;
}
};

View file

@ -1143,5 +1143,23 @@ extern "C" {
Z3_CATCH_RETURN(nullptr);
}
void Z3_API Z3_solver_set_initial_value(Z3_context c, Z3_solver s, Z3_ast var, Z3_ast value) {
Z3_TRY;
LOG_Z3_solver_set_initial_value(c, s, var, value);
RESET_ERROR_CODE();
if (to_expr(var)->get_sort() != to_expr(value)->get_sort()) {
SET_ERROR_CODE(Z3_INVALID_USAGE, "variable and value should have same sort");
return;
}
ast_manager& m = mk_c(c)->m();
if (!m.is_value(to_expr(value))) {
SET_ERROR_CODE(Z3_INVALID_USAGE, "a proper value was not supplied");
return;
}
to_solver_ref(s)->user_propagate_initialize_value(to_expr(var), to_expr(value));
Z3_CATCH;
}
};

View file

@ -1603,10 +1603,10 @@ namespace z3 {
unsigned i;
public:
iterator(expr& e, unsigned i): e(e), i(i) {}
bool operator==(iterator const& other) noexcept {
bool operator==(iterator const& other) const noexcept {
return i == other.i;
}
bool operator!=(iterator const& other) noexcept {
bool operator!=(iterator const& other) const noexcept {
return i != other.i;
}
expr operator*() const { return e.arg(i); }
@ -2865,6 +2865,17 @@ namespace z3 {
check_error();
return result;
}
void set_initial_value(expr const& var, expr const& value) {
Z3_solver_set_initial_value(ctx(), m_solver, var, value);
check_error();
}
void set_initial_value(expr const& var, int i) {
set_initial_value(var, ctx().num_val(i, var.get_sort()));
}
void set_initial_value(expr const& var, bool b) {
set_initial_value(var, ctx().bool_val(b));
}
expr proof() const { Z3_ast r = Z3_solver_get_proof(ctx(), m_solver); check_error(); return expr(ctx(), r); }
friend std::ostream & operator<<(std::ostream & out, solver const & s);
@ -2946,10 +2957,10 @@ namespace z3 {
expr_vector const * operator->() const { return &(operator*()); }
expr_vector const& operator*() const noexcept { return m_cube; }
bool operator==(cube_iterator const& other) noexcept {
bool operator==(cube_iterator const& other) const noexcept {
return other.m_end == m_end;
};
bool operator!=(cube_iterator const& other) noexcept {
bool operator!=(cube_iterator const& other) const noexcept {
return other.m_end != m_end;
};
@ -3330,6 +3341,17 @@ namespace z3 {
handle add(expr const& e, unsigned weight) {
return add_soft(e, weight);
}
void set_initial_value(expr const& var, expr const& value) {
Z3_optimize_set_initial_value(ctx(), m_opt, var, value);
check_error();
}
void set_initial_value(expr const& var, int i) {
set_initial_value(var, ctx().num_val(i, var.get_sort()));
}
void set_initial_value(expr const& var, bool b) {
set_initial_value(var, ctx().bool_val(b));
}
handle maximize(expr const& e) {
return handle(Z3_optimize_maximize(ctx(), m_opt, e));
}

File diff suppressed because it is too large.

View file

@ -29,7 +29,7 @@
"clean": "rimraf build 'src/**/*.__GENERATED__.*'",
"lint": "prettier -c '{./,src/,scripts/,examples/}**/*.{js,ts}'",
"format": "prettier --write '{./,src/,scripts/}**/*.{js,ts}'",
"test": "jest",
"test": "node --expose-gc ./node_modules/.bin/jest",
"docs": "typedoc",
"check-engine": "check-engine"
},

View file

@ -69,7 +69,7 @@ const fns = JSON.stringify(exportedFuncs());
const methods = '["ccall","FS","allocate","UTF8ToString","intArrayFromString","ALLOC_NORMAL"]';
const libz3a = path.normalize('../../../build/libz3.a');
spawnSync(
`emcc build/async-fns.cc ${libz3a} --std=c++20 --pre-js src/low-level/async-wrapper.js -g2 -pthread -fexceptions -s WASM_BIGINT -s USE_PTHREADS=1 -s PTHREAD_POOL_SIZE=0 -s PTHREAD_POOL_SIZE_STRICT=0 -s MODULARIZE=1 -s 'EXPORT_NAME="initZ3"' -s EXPORTED_RUNTIME_METHODS=${methods} -s EXPORTED_FUNCTIONS=${fns} -s DISABLE_EXCEPTION_CATCHING=0 -s SAFE_HEAP=0 -s DEMANGLE_SUPPORT=1 -s TOTAL_MEMORY=1GB -s TOTAL_STACK=20MB -I z3/src/api/ -o build/z3-built.js`,
`emcc build/async-fns.cc ${libz3a} --std=c++20 --pre-js src/low-level/async-wrapper.js -g2 -pthread -fexceptions -s WASM_BIGINT -s USE_PTHREADS=1 -s PTHREAD_POOL_SIZE=0 -s PTHREAD_POOL_SIZE_STRICT=0 -s MODULARIZE=1 -s 'EXPORT_NAME="initZ3"' -s EXPORTED_RUNTIME_METHODS=${methods} -s EXPORTED_FUNCTIONS=${fns} -s DISABLE_EXCEPTION_CATCHING=0 -s SAFE_HEAP=0 -s DEMANGLE_SUPPORT=1 -s TOTAL_MEMORY=2GB -s TOTAL_STACK=20MB -I z3/src/api/ -o build/z3-built.js`,
);
fs.rmSync(ccWrapperPath);

View file

@ -4,6 +4,12 @@ import { init, killThreads } from '../jest';
import { Arith, Bool, Model, Quantifier, Z3AssertionError, Z3HighLevel, AstVector } from './types';
import { expectType } from 'ts-expect';
// this should not be necessary but there may be a Jest bug
// https://github.com/jestjs/jest/issues/7874
afterEach(() => {
global.gc && global.gc();
});
/**
* Generate all possible solutions from given assumptions.
*
@ -355,6 +361,7 @@ describe('high-level', () => {
});
});
describe('bitvectors', () => {
it('can do simple proofs', async () => {
const { BitVec, Concat, Implies, isBitVecVal } = api.Context('main');
@ -373,7 +380,7 @@ describe('high-level', () => {
const y = BitVec.const('y', 32);
await prove(Implies(Concat(x, y).eq(Concat(y, x)), x.eq(y)));
});
}, 10_000 /* timeout ms */);
it('finds x and y such that: x ^ y - 103 == x * y', async () => {
const { BitVec, isBitVecVal } = api.Context('main');
@ -393,6 +400,7 @@ describe('high-level', () => {
});
});
describe('arrays', () => {
it('Example 1', async () => {
const Z3 = api.Context('main');
@ -447,7 +455,7 @@ describe('high-level', () => {
await prove(Eq(arr2.select(0), FIVE_VAL));
await prove(Not(Eq(arr2.select(0), BitVec.val(6, 256))));
await prove(Eq(arr2.store(idx, val).select(idx), constArr.store(idx, val).select(idx)));
});
}, 10_000 /* timeout ms */);
it('Finds arrays that differ but that sum to the same', async () => {
const Z3 = api.Context('main');

View file

@ -1,6 +1,8 @@
# Julia bindings
The Julia package [Z3.jl](https://github.com/ahumenberger/Z3.jl) provides and interface to Z3 by exposing its C++ API via [CxxWrap.jl](https://github.com/JuliaInterop/CxxWrap.jl). The bindings therefore consist of a [C++ part](z3jl.cpp) and a [Julia part](https://github.com/ahumenberger/Z3.jl). The C++ part defines the Z3 types/methods which are exposed. The resulting library is loaded in the Julia part via CxxWrap.jl which creates the corresponding Julia types/methods.
The Julia package [Z3.jl](https://github.com/ahumenberger/Z3.jl) provides an interface to Z3 by exposing its C API.
A previous version exposed the C++ API via [CxxWrap.jl](https://github.com/JuliaInterop/CxxWrap.jl). The bindings therefore consisted of a [C++ part](z3jl.cpp) and a [Julia part](https://github.com/ahumenberger/Z3.jl). The C++ part defines the Z3 types/methods which are exposed. The resulting library is loaded in the Julia part via CxxWrap.jl which creates the corresponding Julia types/methods.
## Building the C++ part

View file

@ -1,3 +1,3 @@
[build-system]
requires = ["setuptools>=46.4.0", "wheel", "cmake"]
requires = ["setuptools>=70", "cmake"]
build-backend = "setuptools.build_meta"

View file

@ -7,18 +7,17 @@ import multiprocessing
import re
import glob
from setuptools import setup
from distutils.util import get_platform
from distutils.errors import LibError
from distutils.command.build import build as _build
from distutils.command.sdist import sdist as _sdist
from distutils.command.clean import clean as _clean
from setuptools.command.build import build as _build
from setuptools.command.sdist import sdist as _sdist
from setuptools.command.bdist_wheel import bdist_wheel as _bdist_wheel
from setuptools.command.develop import develop as _develop
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class LibError(Exception):
pass
build_env = dict(os.environ)
build_env['PYTHON'] = sys.executable
build_env['CXXFLAGS'] = build_env.get('CXXFLAGS', '') + " -std=c++17"
build_env['CXXFLAGS'] = build_env.get('CXXFLAGS', '') + " -std=c++20"
# determine where we're building and where sources are
ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
@ -33,6 +32,8 @@ if RELEASE_DIR is None:
HEADER_DIRS = [os.path.join(SRC_DIR, 'src', 'api'), os.path.join(SRC_DIR, 'src', 'api', 'c++')]
RELEASE_METADATA = None
BUILD_PLATFORM = sys.platform
BUILD_ARCH = os.environ.get("Z3_CROSS_COMPILING", platform.machine())
BUILD_OS_VERSION = platform.mac_ver()[0].split(".")
else:
if not os.path.isdir(RELEASE_DIR):
raise Exception("RELEASE_DIR (%s) is not a directory!" % RELEASE_DIR)
@ -43,6 +44,11 @@ else:
raise Exception("RELEASE_DIR (%s) must be in the format z3-version-arch-os[-osversion] so we can extract metadata from it. Sorry!" % RELEASE_DIR)
RELEASE_METADATA.pop(0)
BUILD_PLATFORM = RELEASE_METADATA[2]
BUILD_ARCH = RELEASE_METADATA[1]
if len(RELEASE_METADATA) == 4:
BUILD_OS_VERSION = RELEASE_METADATA[3].split(".")
else:
BUILD_OS_VERSION = None
# determine where destinations are
LIBS_DIR = os.path.join(ROOT_DIR, 'z3', 'lib')
@ -50,7 +56,7 @@ HEADERS_DIR = os.path.join(ROOT_DIR, 'z3', 'include')
BINS_DIR = os.path.join(ROOT_DIR, 'bin')
# determine platform-specific filenames
if BUILD_PLATFORM in ('darwin', 'osx'):
if BUILD_PLATFORM in ('sequoia','darwin', 'osx'):
LIBRARY_FILE = "libz3.dylib"
EXECUTABLE_FILE = "z3"
elif BUILD_PLATFORM in ('win32', 'cygwin', 'win'):
@ -193,7 +199,7 @@ def _copy_bins():
link_name = None
if BUILD_PLATFORM in ('win32', 'cygwin', 'win'):
pass # TODO: When windows VMs work on M1, fill this in
elif BUILD_PLATFORM in ('darwin', 'osx'):
elif BUILD_PLATFORM in ('sequoia', 'darwin', 'osx'):
split = LIBRARY_FILE.split('.')
link_name = split[0] + '.' + major_minor + '.' + split[1]
else:
@ -238,111 +244,41 @@ class develop(_develop):
self.execute(_copy_bins, (), msg="Copying binaries")
_develop.run(self)
class bdist_egg(_bdist_egg):
def run(self):
self.run_command('build')
_bdist_egg.run(self)
class sdist(_sdist):
def run(self):
self.execute(_clean_bins, (), msg="Cleaning binary files and headers")
self.execute(_copy_sources, (), msg="Copying source files")
_sdist.run(self)
class clean(_clean):
def run(self):
self.execute(_clean_bins, (), msg="Cleaning binary files and headers")
self.execute(_clean_native_build, (), msg="Cleaning native build")
_clean.run(self)
class bdist_wheel(_bdist_wheel):
def finalize_options(self):
if BUILD_ARCH is not None and BUILD_PLATFORM is not None:
os_version_tag = '_'.join(BUILD_OS_VERSION[:2]) if BUILD_OS_VERSION is not None else 'xxxxxx'
TAGS = {
# linux tags cannot be deployed - they must be auditwheel'd to pick the right compatibility tag based on imported libc symbol versions
("linux", "x86_64"): "linux_x86_64",
("linux", "aarch64"): "linux_aarch64",
# windows arm64 is not supported by pypi yet
("win", "x64"): "win_amd64",
("win", "x86"): "win32",
("osx", "x64"): f"macosx_{os_version_tag}_x86_64",
("osx", "arm64"): f"macosx_{os_version_tag}_arm64",
("darwin", "x86_64"): f"macosx_{os_version_tag}_x86_64",
("darwin", "x64"): f"macosx_{os_version_tag}_x86_64",
("darwin", "arm64"): f"macosx_{os_version_tag}_arm64",
("sequoia", "x64"): f"macosx_{os_version_tag}_x86_64",
("sequoia", "x86_64"): f"macosx_{os_version_tag}_x86_64",
("sequoia", "arm64"): f"macosx_{os_version_tag}_arm64",
} # type: dict[tuple[str, str], str]
self.plat_name = TAGS[(BUILD_PLATFORM, BUILD_ARCH)]
return super().finalize_options()
# the build directory needs to exist
#try: os.makedirs(os.path.join(ROOT_DIR, 'build'))
#except OSError: pass
# platform.freedesktop_os_release was added in 3.10
os_id = ''
if hasattr(platform, 'freedesktop_os_release'):
try:
osr = platform.freedesktop_os_release()
print(osr)
os_id = osr['ID']
except OSError:
pass
if 'bdist_wheel' in sys.argv and '--plat-name' not in sys.argv:
if RELEASE_DIR is None:
name = get_platform()
if 'linux' in name:
# linux_* platform tags are disallowed because the python ecosystem is fubar
# linux builds should be built in the centos 5 vm for maximum compatibility
# see https://github.com/pypa/manylinux
# see also https://github.com/angr/angr-dev/blob/master/admin/bdist.py
plat_name = 'manylinux_2_28_' + platform.machine()
elif 'mingw' in name:
if platform.architecture()[0] == '64bit':
plat_name = 'win_amd64'
else:
plat_name ='win32'
else:
# https://www.python.org/dev/peps/pep-0425/
plat_name = name.replace('.', '_').replace('-', '_')
else:
# extract the architecture of the release from the directory name
arch = RELEASE_METADATA[1]
distos = RELEASE_METADATA[2]
if distos in ('debian', 'ubuntu'):
raise Exception(
"Linux binary distributions must be built on centos to conform to PEP 513 or alpine if targeting musl"
)
elif distos == 'glibc':
if arch == 'x64':
plat_name = 'manylinux_2_28_x86_64'
elif arch == 'arm64' or arch == 'aarch64':
# context on why are we match on arm64
# but use aarch64 on the plat_name is
# due to a workaround current python
# legacy build doesn't support aarch64
# so using the currently supported arm64
# build and simply rename it to aarch64
# see full context on #7148
plat_name = 'manylinux_2_28_aarch64'
else:
plat_name = 'manylinux_2_28_i686'
elif distos == 'linux' and os_id == 'alpine':
if arch == 'x64':
plat_name = 'musllinux_1_1_x86_64'
else:
plat_name = 'musllinux_1_1_i686'
elif distos == 'win':
if arch == 'x64':
plat_name = 'win_amd64'
else:
plat_name = 'win32'
elif distos == 'osx':
osver = RELEASE_METADATA[3]
if osver.count('.') > 1:
osver = '.'.join(osver.split('.')[:2])
if osver.startswith("11"):
osver = "11_0"
if arch == 'x64':
plat_name ='macosx_%s_x86_64' % osver.replace('.', '_')
elif arch == 'arm64':
plat_name ='macosx_%s_arm64' % osver.replace('.', '_')
else:
raise Exception(f"idk how os {distos} {osver} works. what goes here?")
else:
raise Exception(f"idk how to translate between this z3 release os {distos} and the python naming scheme")
idx = sys.argv.index('bdist_wheel') + 1
sys.argv.insert(idx, '--plat-name')
sys.argv.insert(idx + 1, plat_name)
sys.argv.insert(idx + 2, '--universal') # supports py2+py3. if --plat-name is not specified this will also mean that the package can be installed on any machine regardless of architecture, so watch out!
setup(
name='z3-solver',
version=_z3_version(),
description='an efficient SMT solver library',
long_description='Z3 is a theorem prover from Microsoft Research with support for bitvectors, booleans, arrays, floating point numbers, strings, and other data types.\n\nFor documentation, please read http://z3prover.github.io/api/html/z3.html\n\nIn the event of technical difficulties related to configuration, compilation, or installation, please submit issues to https://github.com/z3prover/z3.git',
long_description='Z3 is a theorem prover from Microsoft Research with support for bitvectors, booleans, arrays, floating point numbers, strings, and other data types.\n\nFor documentation, please read http://z3prover.github.io/api/html/z3.html',
author="The Z3 Theorem Prover Project",
maintainer="Audrey Dutcher and Nikolaj Bjorner",
maintainer_email="audrey@rhelmot.io",
@ -356,5 +292,5 @@ setup(
'z3': [os.path.join('lib', '*'), os.path.join('include', '*.h'), os.path.join('include', 'c++', '*.h')]
},
data_files=[('bin',[os.path.join('bin',EXECUTABLE_FILE)])],
cmdclass={'build': build, 'develop': develop, 'sdist': sdist, 'bdist_egg': bdist_egg, 'clean': clean},
cmdclass={'build': build, 'develop': develop, 'sdist': sdist, 'bdist_wheel': bdist_wheel},
)

View file

@ -6798,7 +6798,7 @@ class Statistics:
sat
>>> st = s.statistics()
>>> len(st)
6
7
"""
return int(Z3_stats_size(self.ctx.ref(), self.stats))
@ -6812,11 +6812,11 @@ class Statistics:
sat
>>> st = s.statistics()
>>> len(st)
6
7
>>> st[0]
('nlsat propagations', 2)
>>> st[1]
('nlsat stages', 2)
('nlsat restarts', 1)
"""
if idx >= len(self):
raise IndexError
@ -7353,6 +7353,13 @@ class Solver(Z3PPObject):
Z3_solver_get_levels(self.ctx.ref(), self.solver, trail.vector, len(trail), levels)
return trail, levels
def set_initial_value(self, var, value):
"""initialize the solver's state by setting the initial value of var to value
"""
s = var.sort()
value = s.cast(value)
Z3_solver_set_initial_value(self.ctx.ref(), self.solver, var.ast, value.ast)
def trail(self):
"""Return trail of the solver state after a check() call.
"""
@ -7926,9 +7933,12 @@ _on_model_eh = on_model_eh_type(_global_on_model)
class Optimize(Z3PPObject):
"""Optimize API provides methods for solving using objective functions and weighted soft constraints"""
def __init__(self, ctx=None):
def __init__(self, optimize=None, ctx=None):
self.ctx = _get_ctx(ctx)
self.optimize = Z3_mk_optimize(self.ctx.ref())
if optimize is None:
self.optimize = Z3_mk_optimize(self.ctx.ref())
else:
self.optimize = optimize
self._on_models_id = None
Z3_optimize_inc_ref(self.ctx.ref(), self.optimize)
@ -8029,6 +8039,13 @@ class Optimize(Z3PPObject):
return [asoft(a) for a in arg]
return asoft(arg)
def set_initial_value(self, var, value):
"""initialize the solver's state by setting the initial value of var to value
"""
s = var.sort()
value = s.cast(value)
Z3_optimize_set_initial_value(self.ctx.ref(), self.optimize, var.ast, value.ast)
def maximize(self, arg):
"""Add objective function to maximize."""
return OptimizeObjective(
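Analogously, Optimize.set_initial_value above pairs with maximize/minimize; a minimal sketch (not part of this diff):

```python
from z3 import Optimize, Int, sat

x = Int('x')
opt = Optimize()
opt.add(x >= 0, x <= 10)
opt.set_initial_value(x, 7)  # hint the search towards x = 7
opt.maximize(x)
if opt.check() == sat:
    print(opt.model())
```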
@ -10220,7 +10237,7 @@ def FPs(names, fpsort, ctx=None):
>>> x.ebits()
8
>>> fpMul(RNE(), fpAdd(RNE(), x, y), z)
x + y * z
(x + y) * z
"""
ctx = _get_ctx(ctx)
if isinstance(names, str):

View file

@ -1412,8 +1412,10 @@ class HTMLFormatter(Formatter):
ys_pp = group(seq(ys))
if a.is_forall():
header = "&forall;"
else:
elif a.is_exists():
header = "&exist;"
else:
header = "&lambda;"
return group(compose(to_format(header, 1),
indent(1, compose(ys_pp, to_format(" :"), line_break(), body_pp))))
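With the extra branch above, lambda quantifiers get a &lambda; header instead of falling through to &exist;. A sketch of how this surfaces from Python (not part of this diff, assuming the pretty-printer's html_mode option):

```python
from z3 import Int, Lambda, set_option

x = Int('x')
set_option(html_mode=True)
print(Lambda([x], x + 1))    # expected to render with a &lambda; header
set_option(html_mode=False)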

View file

@ -7241,6 +7241,18 @@ extern "C" {
bool Z3_API Z3_solver_propagate_consequence(Z3_context c, Z3_solver_callback cb, unsigned num_fixed, Z3_ast const* fixed, unsigned num_eqs, Z3_ast const* eq_lhs, Z3_ast const* eq_rhs, Z3_ast conseq);
/**
\brief provide an initialization hint to the solver. The initialization hint is used to calibrate an initial value of the expression that
represents a variable. If the variable is Boolean, the initial phase is set according to \c value. If the variable is an integer or real,
the initial Simplex tableau is recalibrated to attempt to follow the value assignment.
def_API('Z3_solver_set_initial_value', VOID, (_in(CONTEXT), _in(SOLVER), _in(AST), _in(AST)))
*/
void Z3_API Z3_solver_set_initial_value(Z3_context c, Z3_solver s, Z3_ast v, Z3_ast val);
/**
\brief Check whether the assertions in a given solver are consistent or not.

View file

@ -139,6 +139,18 @@ extern "C" {
*/
void Z3_API Z3_optimize_pop(Z3_context c, Z3_optimize d);
/**
\brief provide an initialization hint to the solver.
The initialization hint is used to calibrate an initial value of the expression that
represents a variable. If the variable is Boolean, the initial phase is set
according to \c value. If the variable is an integer or real,
the initial Simplex tableau is recalibrated to attempt to follow the value assignment.
def_API('Z3_optimize_set_initial_value', VOID, (_in(CONTEXT), _in(OPTIMIZE), _in(AST), _in(AST)))
*/
void Z3_API Z3_optimize_set_initial_value(Z3_context c, Z3_optimize o, Z3_ast v, Z3_ast val);
/**
\brief Check consistency and produce optimal values.
\param c - context

View file

@ -34,9 +34,6 @@ struct arith_decl_plugin::algebraic_numbers_wrapper {
m_nums(m_amanager) {
}
~algebraic_numbers_wrapper() {
}
unsigned mk_id(algebraic_numbers::anum const & val) {
SASSERT(!m_amanager.is_rational(val));
unsigned idx = m_id_gen.mk();

View file

@ -577,9 +577,9 @@ void array_decl_plugin::get_sort_names(svector<builtin_name>& sort_names, symbol
void array_decl_plugin::get_op_names(svector<builtin_name>& op_names, symbol const & logic) {
op_names.push_back(builtin_name("store",OP_STORE));
op_names.push_back(builtin_name("select",OP_SELECT));
op_names.push_back(builtin_name("const",OP_CONST_ARRAY)); // github issue #7383
if (logic == symbol::null || logic == symbol("HORN") || logic == symbol("ALL")) {
// none of the SMT2 logics support these extensions
op_names.push_back(builtin_name("const",OP_CONST_ARRAY));
op_names.push_back(builtin_name("map",OP_ARRAY_MAP));
op_names.push_back(builtin_name("default",OP_ARRAY_DEFAULT));
op_names.push_back(builtin_name("union",OP_SET_UNION));

View file

@ -58,7 +58,7 @@ parameter::parameter(parameter const& other) : m_val(other.m_val) {
}
void parameter::init_eh(ast_manager & m) {
if (is_ast()) {
if (is_ast()) {
m.inc_ref(get_ast());
}
}
@ -1008,7 +1008,8 @@ sort* basic_decl_plugin::join(unsigned n, expr* const* es) {
}
sort* basic_decl_plugin::join(sort* s1, sort* s2) {
if (s1 == s2) return s1;
if (s1 == s2)
return s1;
if (s1->get_family_id() == arith_family_id &&
s2->get_family_id() == arith_family_id) {
if (s1->get_decl_kind() == REAL_SORT) {
@ -1016,6 +1017,10 @@ sort* basic_decl_plugin::join(sort* s1, sort* s2) {
}
return s2;
}
if (s1 == m_bool_sort && s2->get_family_id() == arith_family_id)
return s2;
if (s2 == m_bool_sort && s1->get_family_id() == arith_family_id)
return s1;
std::ostringstream buffer;
buffer << "Sorts " << mk_pp(s1, *m_manager) << " and " << mk_pp(s2, *m_manager) << " are incompatible";
throw ast_exception(buffer.str());

View file

@ -45,8 +45,6 @@ struct ast_pp_dot_st {
m_printed(),
m_to_print(),
m_first(true) {}
~ast_pp_dot_st() {};
void push_term(const expr * a) { m_to_print.push_back(a); }

View file

@ -31,6 +31,7 @@ Revision History:
#include "ast/datatype_decl_plugin.h"
#include "ast/seq_decl_plugin.h"
#include "ast/fpa_decl_plugin.h"
#include "ast/recfun_decl_plugin.h"
#include "ast/for_each_ast.h"
#include "ast/decl_collector.h"
#include "math/polynomial/algebraic_numbers.h"
@ -1000,6 +1001,18 @@ void ast_smt_pp::display_smt2(std::ostream& strm, expr* n) {
}
}
vector<std::pair<func_decl*, expr*>> recfuns;
recfun::util u(m);
for (auto f : decls.get_rec_decls())
recfuns.push_back({f, u.get_def(f).get_rhs()});
if (!recfuns.empty()) {
smt2_pp_environment_dbg env(m);
ast_smt2_pp_recdefs(strm, recfuns, env);
}
#endif
for (expr* a : m_assumptions) {

View file

@ -33,8 +33,7 @@ class ast2ast_trailmap {
public:
ast2ast_trailmap(ast_manager& m):
m_domain(m),
m_range(m),
m_map()
m_range(m)
{}
bool find(S* s, T*& t) {

View file

@ -927,7 +927,7 @@ sort * bv_util::mk_sort(unsigned bv_size) {
}
unsigned bv_util::get_int2bv_size(parameter const& p) {
int sz;
int sz = 0;
VERIFY(m_plugin->get_int2bv_size(1, &p, sz));
return static_cast<unsigned>(sz);
}
@ -951,4 +951,4 @@ app* bv_util::mk_bv_rotate_left(expr* arg, unsigned n) {
app* bv_util::mk_bv_rotate_right(expr* arg, unsigned n) {
parameter p(n);
return m_manager.mk_app(get_fid(), OP_ROTATE_RIGHT, 1, &p, 1, &arg);
}
}

View file

@ -22,6 +22,7 @@ Notes:
#include "ast/for_each_expr.h"
#include "ast/ast_util.h"
#include "ast/occurs.h"
#include "ast/bv_decl_plugin.h"
#include "ast/rewriter/expr_safe_replace.h"
#include "ast/rewriter/th_rewriter.h"
#include "ast/converters/generic_model_converter.h"
@ -130,6 +131,54 @@ generic_model_converter * generic_model_converter::copy(ast_translation & transl
return res;
}
void generic_model_converter::convert_initialize_value(vector<std::pair<expr_ref, expr_ref>> & var2value) {
if (var2value.empty() || m_entries.empty())
return;
for (unsigned i = 0; i < var2value.size(); ++i) {
auto& [var, value] = var2value[i];
for (auto const& e : m_entries) {
switch (e.m_instruction) {
case HIDE:
break;
case ADD:
if (is_uninterp_const(var) && e.m_f == to_app(var)->get_decl())
convert_initialize_value(e.m_def, i, var2value);
break;
}
}
}
}
void generic_model_converter::convert_initialize_value(expr* def, unsigned i, vector<std::pair<expr_ref, expr_ref>>& var2value) {
// var = if(c, th, el) = value
// th = value => c = true
// el = value => c = false
expr* c = nullptr, *th = nullptr, *el = nullptr;
auto& [var, value] = var2value[i];
if (m.is_ite(def, c, th, el)) {
if (value == th) {
var = c;
value = m.mk_true();
return;
}
if (value == el) {
var = c;
value = m.mk_false();
return;
}
}
// var = def = value
// => def = value
if (is_uninterp(def)) {
var = def;
return;
}
}
void generic_model_converter::set_env(ast_pp_util* visitor) {
if (!visitor) {

View file

@ -37,6 +37,7 @@ private:
vector<entry> m_entries;
expr_ref simplify_def(entry const& e);
void convert_initialize_value(expr* def, unsigned i, vector<std::pair<expr_ref, expr_ref>>& var2value);
public:
generic_model_converter(ast_manager & m, char const* orig) : m(m), m_orig(orig) {}
@ -61,6 +62,8 @@ public:
model_converter * translate(ast_translation & translator) override { return copy(translator); }
void convert_initialize_value(vector<std::pair<expr_ref, expr_ref>>& var2value) override;
generic_model_converter* copy(ast_translation & translator);
void set_env(ast_pp_util* visitor) override;

View file

@ -107,6 +107,12 @@ public:
m_c2->get_units(fmls);
m_c1->get_units(fmls);
}
void convert_initialize_value(vector<std::pair<expr_ref, expr_ref>>& var2value) override {
m_c2->convert_initialize_value(var2value);
m_c1->convert_initialize_value(var2value);
}
char const * get_name() const override { return "concat-model-converter"; }

View file

@ -71,9 +71,6 @@ protected:
void display_del(std::ostream& out, func_decl* f) const;
void display_add(std::ostream& out, ast_manager& m);
public:
model_converter() {}
void set_completion(bool f) { m_completion = f; }
virtual void operator()(model_ref & m) = 0;
@ -86,6 +83,8 @@ public:
virtual void set_env(ast_pp_util* visitor);
virtual void convert_initialize_value(vector<std::pair<expr_ref, expr_ref>> & var2value) { }
/**
\brief we are adding a formula to the context of the model converter.
The operator has as side effect of adding definitions as assertions to the

View file

@ -220,17 +220,33 @@ namespace datatype {
}
namespace decl {
plugin::~plugin() {
finalize();
}
void plugin::finalize() {
for (auto& kv : m_defs) {
dealloc(kv.m_value);
}
for (auto& kv : m_defs)
dealloc(kv.m_value);
m_defs.reset();
m_util = nullptr; // force deletion
reset();
}
void plugin::reset() {
m_datatype2constructors.reset();
m_datatype2nonrec_constructor.reset();
m_constructor2accessors.reset();
m_constructor2recognizer.reset();
m_recognizer2constructor.reset();
m_accessor2constructor.reset();
m_is_recursive.reset();
m_is_enum.reset();
std::for_each(m_vectors.begin(), m_vectors.end(), delete_proc<ptr_vector<func_decl> >());
m_vectors.reset();
dealloc(m_asts);
m_asts = nullptr;
++m_start;
}
util & plugin::u() const {
@ -578,6 +594,7 @@ namespace datatype {
if (m_defs.find(s, d))
dealloc(d);
m_defs.remove(s);
reset();
}
bool plugin::is_value_visit(bool unique, expr * arg, ptr_buffer<app> & todo) const {
@ -799,7 +816,7 @@ namespace datatype {
for (unsigned i = 0; i < n; ++i) {
sort* ps = get_datatype_parameter_sort(s, i);
sz = get_sort_size(params, ps);
m_refs.push_back(sz);
plugin().m_refs.push_back(sz);
S.insert(d.params().get(i), sz);
}
auto ss = d.sort_size();
@ -896,7 +913,7 @@ namespace datatype {
}
TRACE("datatype", tout << "set sort size " << s << "\n";);
d.set_sort_size(param_size::size::mk_plus(s_add));
m_refs.reset();
plugin().m_refs.reset();
}
}
@ -1008,9 +1025,7 @@ namespace datatype {
util::util(ast_manager & m):
m(m),
m_family_id(null_family_id),
m_plugin(nullptr),
m_asts(m),
m_start(0) {
m_plugin(nullptr) {
}
@ -1025,26 +1040,21 @@ namespace datatype {
return m_family_id;
}
util::~util() {
std::for_each(m_vectors.begin(), m_vectors.end(), delete_proc<ptr_vector<func_decl> >());
}
ptr_vector<func_decl> const * util::get_datatype_constructors(sort * ty) {
SASSERT(is_datatype(ty));
ptr_vector<func_decl> * r = nullptr;
if (m_datatype2constructors.find(ty, r))
if (plugin().m_datatype2constructors.find(ty, r))
return r;
r = alloc(ptr_vector<func_decl>);
m_asts.push_back(ty);
m_vectors.push_back(r);
m_datatype2constructors.insert(ty, r);
plugin().add_ast(ty);
plugin().m_vectors.push_back(r);
plugin().m_datatype2constructors.insert(ty, r);
if (!is_declared(ty))
m.raise_exception("datatype constructors have not been created");
def const& d = get_def(ty);
for (constructor const* c : d) {
func_decl_ref f = c->instantiate(ty);
m_asts.push_back(f);
plugin().add_ast(f);
r->push_back(f);
}
return r;
@ -1053,13 +1063,13 @@ namespace datatype {
ptr_vector<func_decl> const * util::get_constructor_accessors(func_decl * con) {
SASSERT(is_constructor(con));
ptr_vector<func_decl> * res = nullptr;
if (m_constructor2accessors.find(con, res)) {
if (plugin().m_constructor2accessors.find(con, res)) {
return res;
}
res = alloc(ptr_vector<func_decl>);
m_asts.push_back(con);
m_vectors.push_back(res);
m_constructor2accessors.insert(con, res);
plugin().add_ast(con);
plugin().m_vectors.push_back(res);
plugin().m_constructor2accessors.insert(con, res);
sort * datatype = con->get_range();
def const& d = get_def(datatype);
for (constructor const* c : d) {
@ -1067,7 +1077,7 @@ namespace datatype {
for (accessor const* a : *c) {
func_decl_ref fn = a->instantiate(datatype);
res->push_back(fn);
m_asts.push_back(fn);
plugin().add_ast(fn);
}
break;
}
@ -1086,7 +1096,7 @@ namespace datatype {
func_decl * util::get_constructor_recognizer(func_decl * con) {
SASSERT(is_constructor(con));
func_decl * d = nullptr;
if (m_constructor2recognizer.find(con, d))
if (plugin().m_constructor2recognizer.find(con, d))
return d;
sort * datatype = con->get_range();
def const& dd = get_def(datatype);
@ -1097,9 +1107,9 @@ namespace datatype {
parameter ps[2] = { parameter(con), parameter(r) };
d = m.mk_func_decl(fid(), OP_DT_RECOGNISER, 2, ps, 1, &datatype);
SASSERT(d);
m_asts.push_back(con);
m_asts.push_back(d);
m_constructor2recognizer.insert(con, d);
plugin().add_ast(con);
plugin().add_ast(d);
plugin().m_constructor2recognizer.insert(con, d);
return d;
}
@ -1120,10 +1130,10 @@ namespace datatype {
bool util::is_recursive(sort * ty) {
SASSERT(is_datatype(ty));
bool r = false;
if (!m_is_recursive.find(ty, r)) {
if (!plugin().m_is_recursive.find(ty, r)) {
r = is_recursive_core(ty);
m_is_recursive.insert(ty, r);
m_asts.push_back(ty);
plugin().m_is_recursive.insert(ty, r);
plugin().add_ast(ty);
}
return r;
}
@ -1147,21 +1157,21 @@ namespace datatype {
if (!is_datatype(s))
return false;
bool r = false;
if (m_is_enum.find(s, r))
if (plugin().m_is_enum.find(s, r))
return r;
ptr_vector<func_decl> const& cnstrs = *get_datatype_constructors(s);
r = true;
for (unsigned i = 0; r && i < cnstrs.size(); ++i)
r = cnstrs[i]->get_arity() == 0;
m_is_enum.insert(s, r);
m_asts.push_back(s);
plugin().m_is_enum.insert(s, r);
plugin().add_ast(s);
return r;
}
func_decl * util::get_accessor_constructor(func_decl * accessor) {
SASSERT(is_accessor(accessor));
func_decl * r = nullptr;
if (m_accessor2constructor.find(accessor, r))
if (plugin().m_accessor2constructor.find(accessor, r))
return r;
sort * datatype = accessor->get_domain(0);
symbol c_id = accessor->get_parameter(1).get_symbol();
@ -1174,26 +1184,15 @@ namespace datatype {
}
}
r = fn;
m_accessor2constructor.insert(accessor, r);
m_asts.push_back(accessor);
m_asts.push_back(r);
plugin().m_accessor2constructor.insert(accessor, r);
plugin().add_ast(accessor);
plugin().add_ast(r);
return r;
}
void util::reset() {
m_datatype2constructors.reset();
m_datatype2nonrec_constructor.reset();
m_constructor2accessors.reset();
m_constructor2recognizer.reset();
m_recognizer2constructor.reset();
m_accessor2constructor.reset();
m_is_recursive.reset();
m_is_enum.reset();
std::for_each(m_vectors.begin(), m_vectors.end(), delete_proc<ptr_vector<func_decl> >());
m_vectors.reset();
m_asts.reset();
++m_start;
plugin().reset();
}
@ -1205,7 +1204,7 @@ namespace datatype {
func_decl * util::get_non_rec_constructor(sort * ty) {
SASSERT(is_datatype(ty));
cnstr_depth cd;
if (m_datatype2nonrec_constructor.find(ty, cd))
if (plugin().m_datatype2nonrec_constructor.find(ty, cd))
return cd.first;
ptr_vector<sort> forbidden_set;
forbidden_set.push_back(ty);
@ -1222,7 +1221,7 @@ namespace datatype {
each T_i is not a datatype or it is a datatype t not in forbidden_set,
and get_non_rec_constructor_core(T_i, forbidden_set union { T_i })
*/
util::cnstr_depth util::get_non_rec_constructor_core(sort * ty, ptr_vector<sort> & forbidden_set) {
cnstr_depth util::get_non_rec_constructor_core(sort * ty, ptr_vector<sort> & forbidden_set) {
// We must select a constructor c(T_1, ..., T_n):T such that
// 1) T_i's are not recursive
// If there is no such constructor, then we select one that
@ -1231,7 +1230,7 @@ namespace datatype {
ptr_vector<func_decl> const& constructors = *get_datatype_constructors(ty);
array_util autil(m);
cnstr_depth result(nullptr, 0);
if (m_datatype2nonrec_constructor.find(ty, result))
if (plugin().m_datatype2nonrec_constructor.find(ty, result))
return result;
TRACE("util_bug", tout << "get-non-rec constructor: " << sort_ref(ty, m) << "\n";
tout << "forbidden: ";
@ -1273,9 +1272,9 @@ namespace datatype {
}
}
if (result.first) {
m_asts.push_back(result.first);
m_asts.push_back(ty);
m_datatype2nonrec_constructor.insert(ty, result);
plugin().add_ast(result.first);
plugin().add_ast(ty);
plugin().m_datatype2nonrec_constructor.insert(ty, result);
}
return result;
}
@ -1291,6 +1290,7 @@ namespace datatype {
IF_VERBOSE(0, verbose_stream() << f->get_name() << "\n");
for (constructor* c : d)
IF_VERBOSE(0, verbose_stream() << "!= " << c->name() << "\n");
return UINT_MAX;
SASSERT(false);
UNREACHABLE();
return 0;

View file

@ -198,6 +198,8 @@ namespace datatype {
def* translate(ast_translation& tr, util& u);
};
typedef std::pair<func_decl*, unsigned> cnstr_depth;
namespace decl {
class plugin : public decl_plugin {
@ -213,6 +215,7 @@ namespace datatype {
void log_axiom_definitions(symbol const& s, sort * new_sort);
public:
plugin(): m_id_counter(0), m_class_id(0), m_has_nested_rec(false) {}
~plugin() override;
@ -259,6 +262,25 @@ namespace datatype {
bool has_nested_rec() const { return m_has_nested_rec; }
void reset();
obj_map<sort, ptr_vector<func_decl>*> m_datatype2constructors;
obj_map<sort, cnstr_depth> m_datatype2nonrec_constructor;
obj_map<func_decl, ptr_vector<func_decl>*> m_constructor2accessors;
obj_map<func_decl, func_decl*> m_constructor2recognizer;
obj_map<func_decl, func_decl*> m_recognizer2constructor;
obj_map<func_decl, func_decl*> m_accessor2constructor;
obj_map<sort, bool> m_is_recursive;
obj_map<sort, bool> m_is_enum;
mutable obj_map<sort, bool> m_is_fully_interp;
mutable ast_ref_vector* m_asts = nullptr;
sref_vector<param_size::size> m_refs;
ptr_vector<ptr_vector<func_decl> > m_vectors;
unsigned m_start = 0;
mutable ptr_vector<sort> m_fully_interp_trail;
void add_ast(ast* a) const { if (!m_asts) m_asts = alloc(ast_ref_vector, *m_manager); m_asts->push_back(a); }
private:
bool is_value_visit(bool unique, expr * arg, ptr_buffer<app> & todo) const;
bool is_value_aux(bool unique, app * arg) const;
@ -295,25 +317,10 @@ namespace datatype {
ast_manager & m;
mutable family_id m_family_id;
mutable decl::plugin* m_plugin;
typedef std::pair<func_decl*, unsigned> cnstr_depth;
family_id fid() const;
obj_map<sort, ptr_vector<func_decl> *> m_datatype2constructors;
obj_map<sort, cnstr_depth> m_datatype2nonrec_constructor;
obj_map<func_decl, ptr_vector<func_decl> *> m_constructor2accessors;
obj_map<func_decl, func_decl *> m_constructor2recognizer;
obj_map<func_decl, func_decl *> m_recognizer2constructor;
obj_map<func_decl, func_decl *> m_accessor2constructor;
obj_map<sort, bool> m_is_recursive;
obj_map<sort, bool> m_is_enum;
mutable obj_map<sort, bool> m_is_fully_interp;
mutable ast_ref_vector m_asts;
sref_vector<param_size::size> m_refs;
ptr_vector<ptr_vector<func_decl> > m_vectors;
unsigned m_start;
mutable ptr_vector<sort> m_fully_interp_trail;
cnstr_depth get_non_rec_constructor_core(sort * ty, ptr_vector<sort> & forbidden_set);
friend class decl::plugin;
@ -331,7 +338,6 @@ namespace datatype {
public:
util(ast_manager & m);
~util();
ast_manager & get_manager() const { return m; }
// sort * mk_datatype_sort(symbol const& name, unsigned n, sort* const* params);
bool is_datatype(sort const* s) const { return is_sort_of(s, fid(), DATATYPE_SORT); }

View file

@ -440,8 +440,9 @@ namespace euf {
TRACE("plugin", tout << "propagate " << eq_id << ": " << eq_pp(*this, m_eqs[eq_id]) << "\n");
// simplify eq using processed
for (auto other_eq : backward_iterator(eq_id))
TRACE("plugin", tout << "backward iterator " << eq_id << " vs " << other_eq << " " << is_processed(other_eq) << "\n");
TRACE("plugin",
for (auto other_eq : backward_iterator(eq_id))
tout << "backward iterator " << eq_id << " vs " << other_eq << " " << is_processed(other_eq) << "\n");
for (auto other_eq : backward_iterator(eq_id))
if (is_processed(other_eq) && backward_simplify(eq_id, other_eq))
goto loop_start;
@ -907,7 +908,6 @@ namespace euf {
m_dst_r.reset();
m_dst_r.append(monomial(dst.r).m_nodes);
unsigned src_r_size = m_src_r.size();
unsigned dst_r_size = m_dst_r.size();
SASSERT(src_r_size == monomial(src.r).size());
// dst_r contains C
// src_r contains E

View file

@ -47,7 +47,6 @@ namespace euf {
unsigned_vector eqs; // equality occurrences
unsigned root_id() const { return root->n->get_id(); }
~node() {}
static node* mk(region& r, enode* n);
};
@ -62,8 +61,7 @@ namespace euf {
node* operator*() { return m_first; }
iterator& operator++() { if (!m_last) m_last = m_first; m_first = m_first->next; return *this; }
iterator operator++(int) { iterator tmp = *this; ++*this; return tmp; }
bool operator==(iterator const& other) const { return m_last == other.m_last && m_first == other.m_first; }
bool operator!=(iterator const& other) const { return !(*this == other); }
bool operator!=(iterator const& other) const { return m_last != other.m_last || m_first != other.m_first; }
};
equiv(node& _n) :n(_n) {}
equiv(node* _n) :n(*_n) {}
@ -270,8 +268,6 @@ namespace euf {
ac_plugin(egraph& g, unsigned fid, unsigned op);
ac_plugin(egraph& g, func_decl* f);
~ac_plugin() override {}
theory_id get_id() const override { return m_fid; }

View file

@ -33,8 +33,6 @@ namespace euf {
public:
arith_plugin(egraph& g);
~arith_plugin() override {}
theory_id get_id() const override { return a.get_family_id(); }
void register_node(enode* n) override;

View file

@ -95,8 +95,6 @@ namespace euf {
public:
bv_plugin(egraph& g);
~bv_plugin() override {}
theory_id get_id() const override { return bv.get_family_id(); }
void register_node(enode* n) override;

View file

@ -107,8 +107,8 @@ namespace euf {
void egraph::update_children(enode* n) {
for (enode* child : enode_args(n))
child->get_root()->add_parent(n);
for (enode* child : enode_args(n))
SASSERT(child->get_root()->m_parents.back() == n);
DEBUG_CODE(for (enode* child : enode_args(n))
SASSERT(child->get_root()->m_parents.back() == n););
m_updates.push_back(update_record(n, update_record::update_children()));
}

View file

@ -280,8 +280,7 @@ namespace euf {
enode* operator*() { return m_first; }
iterator& operator++() { if (!m_last) m_last = m_first; m_first = m_first->m_next; return *this; }
iterator operator++(int) { iterator tmp = *this; ++*this; return tmp; }
bool operator==(iterator const& other) const { return m_last == other.m_last && m_first == other.m_first; }
bool operator!=(iterator const& other) const { return !(*this == other); }
bool operator!=(iterator const& other) const { return m_last != other.m_last || m_first != other.m_first; }
};
enode_class(enode & _n):n(_n) {}
enode_class(enode * _n):n(*_n) {}
@ -300,8 +299,7 @@ namespace euf {
th_var_list const& operator*() { return *m_th_vars; }
iterator& operator++() { m_th_vars = m_th_vars->get_next(); return *this; }
iterator operator++(int) { iterator tmp = *this; ++* this; return tmp; }
bool operator==(iterator const& other) const { return m_th_vars == other.m_th_vars; }
bool operator!=(iterator const& other) const { return !(*this == other); }
bool operator!=(iterator const& other) const { return m_th_vars != other.m_th_vars; }
};
enode_th_vars(enode& _n) :n(_n) {}
enode_th_vars(enode* _n) :n(*_n) {}

View file

@ -34,8 +34,6 @@ namespace euf {
public:
specrel_plugin(egraph& g);
~specrel_plugin() override {}
theory_id get_id() const override { return sp.get_family_id(); }

View file

@ -504,9 +504,6 @@ default_expr2polynomial::default_expr2polynomial(ast_manager & am, polynomial::m
expr2polynomial(am, pm, nullptr) {
}
default_expr2polynomial::~default_expr2polynomial() {
}
bool default_expr2polynomial::is_int(polynomial::var x) const {
return m_is_int[x];
}

View file

@ -102,7 +102,6 @@ class default_expr2polynomial : public expr2polynomial {
bool_vector m_is_int;
public:
default_expr2polynomial(ast_manager & am, polynomial::manager & pm);
~default_expr2polynomial() override;
bool is_int(polynomial::var x) const override;
protected:
polynomial::var mk_var(bool is_int) override;

View file

@ -146,20 +146,16 @@ subterms::iterator& subterms::iterator::operator++() {
return *this;
}
bool subterms::iterator::operator==(iterator const& other) const {
bool subterms::iterator::operator!=(iterator const& other) const {
// ignore state of visited
if (other.m_esp->size() != m_esp->size()) {
return false;
return true;
}
for (unsigned i = m_esp->size(); i-- > 0; ) {
if (m_esp->get(i) != other.m_esp->get(i))
return false;
return true;
}
return true;
}
bool subterms::iterator::operator!=(iterator const& other) const {
return !(*this == other);
return false;
}
@ -216,18 +212,14 @@ subterms_postorder::iterator& subterms_postorder::iterator::operator++() {
return *this;
}
bool subterms_postorder::iterator::operator==(iterator const& other) const {
bool subterms_postorder::iterator::operator!=(iterator const& other) const {
// ignore state of visited
if (other.m_es.size() != m_es.size()) {
return false;
return true;
}
for (unsigned i = m_es.size(); i-- > 0; ) {
if (m_es.get(i) != other.m_es.get(i))
return false;
return true;
}
return true;
}
bool subterms_postorder::iterator::operator!=(iterator const& other) const {
return !(*this == other);
return false;
}

View file

@ -190,7 +190,6 @@ public:
expr* operator*();
iterator operator++(int);
iterator& operator++();
bool operator==(iterator const& other) const;
bool operator!=(iterator const& other) const;
};
@ -220,7 +219,6 @@ public:
expr* operator*();
iterator operator++(int);
iterator& operator++();
bool operator==(iterator const& other) const;
bool operator!=(iterator const& other) const;
};
static subterms_postorder all(expr_ref_vector const& es) { return subterms_postorder(es, true); }

View file

@ -2692,7 +2692,7 @@ void fpa2bv_converter::mk_to_fp_real(func_decl * f, sort * s, expr * rm, expr *
SASSERT(tmp_rat.is_int32());
SASSERT(sz == 3);
mpf_rounding_mode mrm;
mpf_rounding_mode mrm = MPF_ROUND_TOWARD_ZERO;
switch ((BV_RM_VAL)tmp_rat.get_unsigned()) {
case BV_RM_TIES_TO_AWAY: mrm = MPF_ROUND_NEAREST_TAWAY; break;
case BV_RM_TIES_TO_EVEN: mrm = MPF_ROUND_NEAREST_TEVEN; break;

View file

@ -36,9 +36,6 @@ struct fpa2bv_rewriter_cfg : public default_rewriter_cfg {
fpa2bv_rewriter_cfg(ast_manager & m, fpa2bv_converter & c, params_ref const & p);
~fpa2bv_rewriter_cfg() {
}
void cleanup_buffers() {
m_out.finalize();
}

View file

@ -47,9 +47,6 @@ void fpa_decl_plugin::set_manager(ast_manager * m, family_id id) {
m_bv_plugin = static_cast<bv_decl_plugin*>(m_manager->get_plugin(m_bv_fid));
}
fpa_decl_plugin::~fpa_decl_plugin() {
}
unsigned fpa_decl_plugin::mk_id(mpf const & v) {
unsigned new_id = m_id_gen.mk();
m_values.reserve(new_id+1);
@ -961,9 +958,6 @@ fpa_util::fpa_util(ast_manager & m):
m_plugin = static_cast<fpa_decl_plugin*>(m.get_plugin(m_fid));
}
fpa_util::~fpa_util() {
}
sort * fpa_util::mk_float_sort(unsigned ebits, unsigned sbits) {
parameter ps[2] = { parameter(ebits), parameter(sbits) };
return m().mk_sort(m_fid, FLOATING_POINT_SORT, 2, ps);

View file

@ -175,7 +175,6 @@ public:
bool is_float_sort(sort * s) const { return is_sort_of(s, m_family_id, FLOATING_POINT_SORT); }
bool is_rm_sort(sort * s) const { return is_sort_of(s, m_family_id, ROUNDING_MODE_SORT); }
~fpa_decl_plugin() override;
void finalize() override;
decl_plugin * mk_fresh() override;
@ -216,7 +215,6 @@ class fpa_util {
public:
fpa_util(ast_manager & m);
~fpa_util();
ast_manager & m() const { return m_manager; }
mpf_manager & fm() const { return m_plugin->fm(); }

View file

@ -30,7 +30,7 @@ class contains_vars::imp {
void visit(expr * n, unsigned delta, bool & visited) {
expr_delta_pair e(n, delta);
if (!m_cache.contains(e)) {
if (!is_ground(n) && !m_cache.contains(e)) {
m_todo.push_back(e);
visited = false;
}
@ -74,6 +74,7 @@ public:
m_todo.push_back(expr_delta_pair(n, begin));
while (!m_todo.empty()) {
expr_delta_pair e = m_todo.back();
if (visit_children(e.m_node, e.m_delta)) {
m_cache.insert(e);
m_todo.pop_back();

View file

@ -39,10 +39,7 @@ public:
m_weight(weight) {
SASSERT(!m_hint || !m_cond);
}
~cond_macro() {
}
func_decl * get_f() const { return m_f; }
expr * get_def() const { return m_def; }

View file

@ -269,9 +269,6 @@ macro_finder::macro_finder(ast_manager & m, macro_manager & mm):
m_autil(m) {
}
macro_finder::~macro_finder() {
}
bool macro_finder::expand_macros(expr_ref_vector const& exprs, proof_ref_vector const& prs, expr_dependency_ref_vector const& deps, expr_ref_vector & new_exprs, proof_ref_vector & new_prs, expr_dependency_ref_vector & new_deps) {
TRACE("macro_finder", tout << "starting expand_macros:\n";
m_macro_manager.display(tout););

View file

@ -43,7 +43,6 @@ class macro_finder {
public:
macro_finder(ast_manager & m, macro_manager & mm);
~macro_finder();
void operator()(expr_ref_vector const& exprs, proof_ref_vector const& prs, expr_dependency_ref_vector const& deps, expr_ref_vector & new_exprs, proof_ref_vector & new_prs, expr_dependency_ref_vector & new_deps);
void operator()(unsigned n, justified_expr const* fmls, vector<justified_expr>& new_fmls);
};

View file

@ -41,9 +41,6 @@ macro_manager::macro_manager(ast_manager & m):
m_util.set_forbidden_set(&m_forbidden_set);
}
macro_manager::~macro_manager() {
}
void macro_manager::push_scope() {
m_scopes.push_back(scope());
scope & s = m_scopes.back();

View file

@ -64,7 +64,6 @@ class macro_manager {
public:
macro_manager(ast_manager & m);
~macro_manager();
void copy_to(macro_manager& dst);
ast_manager & get_manager() const { return m; }
macro_util & get_util() { return m_util; }

View file

@ -31,9 +31,6 @@ quasi_macros::quasi_macros(ast_manager & m, macro_manager & mm) :
m_new_qsorts(m) {
}
quasi_macros::~quasi_macros() {
}
void quasi_macros::find_occurrences(expr * e) {
unsigned j;
m_todo.reset();

View file

@ -60,7 +60,6 @@ class quasi_macros {
public:
quasi_macros(ast_manager & m, macro_manager & mm);
~quasi_macros();
/**
\brief Find pure function macros and apply them.

View file

@ -57,7 +57,7 @@ struct defined_names::impl {
unsigned_vector m_lims; //!< Backtracking support.
impl(ast_manager & m, char const * prefix);
virtual ~impl();
virtual ~impl() = default;
app * gen_name(expr * e, sort_ref_buffer & var_sorts, buffer<symbol> & var_names);
void cache_new_name(expr * e, app * name);
@ -90,9 +90,6 @@ defined_names::impl::impl(ast_manager & m, char const * prefix):
m_z3name = prefix;
}
defined_names::impl::~impl() {
}
/**
\brief Given an expression \c e that may contain free variables, return an application (sk x_1 ... x_n),
where sk is a fresh variable name, and x_i's are the free variables of \c e.

View file

@ -69,6 +69,7 @@ class skolemizer {
typedef act_cache cache;
ast_manager & m;
var_subst m_subst;
symbol m_sk_hack;
bool m_sk_hack_enabled;
cache m_cache;
@ -128,7 +129,6 @@ class skolemizer {
//
// (VAR 0) should be in the last position of substitution.
//
var_subst s(m);
SASSERT(is_well_sorted(m, q->get_expr()));
expr_ref tmp(m);
expr * body = q->get_expr();
@ -146,7 +146,7 @@ class skolemizer {
}
}
}
r = s(body, substitution);
r = m_subst(body, substitution);
p = nullptr;
if (m_proofs_enabled) {
if (q->get_kind() == forall_k)
@ -159,6 +159,7 @@ class skolemizer {
public:
skolemizer(ast_manager & m):
m(m),
m_subst(m),
m_sk_hack("sk_hack"),
m_sk_hack_enabled(false),
m_cache(m),

View file

@ -41,9 +41,6 @@ expr_pattern_match::expr_pattern_match(ast_manager & manager):
m_manager(manager), m_precompiled(manager) {
}
expr_pattern_match::~expr_pattern_match() {
}
bool
expr_pattern_match::match_quantifier(quantifier* qf, app_ref_vector& patterns, unsigned& weight) {
if (m_regs.empty()) {

View file

@ -116,7 +116,6 @@ class expr_pattern_match {
public:
expr_pattern_match(ast_manager & manager);
~expr_pattern_match();
bool match_quantifier(quantifier * qf, app_ref_vector & patterns, unsigned & weight);
bool match_quantifier_index(quantifier* qf, app_ref_vector & patterns, unsigned& index);
unsigned initialize(quantifier* qf);

View file

@ -114,9 +114,9 @@ class pattern_inference_cfg : public default_rewriter_cfg {
//
class collect {
struct entry {
expr * m_node;
unsigned m_delta;
entry():m_node(nullptr), m_delta(0) {}
expr * m_node = nullptr;
unsigned m_delta = 0;
entry() = default;
entry(expr * n, unsigned d):m_node(n), m_delta(d) {}
unsigned hash() const {
return hash_u_u(m_node->get_id(), m_delta);

View file

@ -230,7 +230,7 @@ public:
<< "New pf: " << mk_pp(newp, m) << "\n";);
}
proof *r;
proof *r = nullptr;
VERIFY(cache.find(pr, r));
DEBUG_CODE(

View file

@ -170,8 +170,6 @@ namespace recfun {
vector<branch> m_branches;
public:
case_state() : m_reg(), m_branches() {}
bool empty() const { return m_branches.empty(); }
branch pop_branch() {
@ -242,23 +240,18 @@ namespace recfun {
{
VERIFY(m_cases.empty() && "cases cannot already be computed");
SASSERT(n_vars == m_domain.size());
TRACEFN("compute cases " << mk_pp(rhs, m));
unsigned case_idx = 0;
std::string name("case-");
name.append(m_name.str());
m_vars.append(n_vars, vars);
m_rhs = rhs;
if (!is_macro)
for (expr* e : subterms::all(m_rhs))
if (is_lambda(e))
throw default_exception("recursive definitions with lambdas are not supported");
unsigned case_idx = 0;
expr_ref_vector conditions(m);
m_vars.append(n_vars, vars);
m_rhs = rhs;
// is the function a macro (unconditional body)?
if (is_macro || n_vars == 0 || !contains_ite(u, rhs)) {
@ -267,7 +260,6 @@ namespace recfun {
return;
}
// analyze control flow of `rhs`, accumulating guards and
// rebuilding a `ite`-free RHS on the fly for each path in `rhs`.
@ -368,9 +360,6 @@ namespace recfun {
m_plugin(dynamic_cast<decl::plugin*>(m.get_plugin(m_fid))) {
}
util::~util() {
}
def * util::decl_fun(symbol const& name, unsigned n, sort *const * domain, sort * range, bool is_generated) {
return alloc(def, m(), m_fid, name, n, domain, range, is_generated);
}
@ -419,7 +408,6 @@ namespace recfun {
}
namespace decl {
plugin::plugin() : decl_plugin(), m_defs(), m_case_defs() {}
plugin::~plugin() { finalize(); }
void plugin::finalize() {

View file

@ -173,7 +173,6 @@ namespace recfun {
void compute_scores(expr* e, obj_map<expr, unsigned>& scores);
public:
plugin();
~plugin() override;
void finalize() override;
@ -238,7 +237,6 @@ namespace recfun {
public:
util(ast_manager &m);
~util();
ast_manager & m() { return m_manager; }
family_id get_family_id() const { return m_fid; }

View file

@ -109,17 +109,20 @@ br_status bv_rewriter::mk_app_core(func_decl * f, unsigned num_args, expr * cons
break;
case OP_BNEG_OVFL:
SASSERT(num_args == 1);
return mk_bvneg_overflow(args[0], result);
st = mk_bvneg_overflow(args[0], result);
break;
case OP_BSHL:
SASSERT(num_args == 2);
return mk_bv_shl(args[0], args[1], result);
st = mk_bv_shl(args[0], args[1], result);
break;
case OP_BLSHR:
SASSERT(num_args == 2);
return mk_bv_lshr(args[0], args[1], result);
st = mk_bv_lshr(args[0], args[1], result);
break;
case OP_BASHR:
SASSERT(num_args == 2);
return mk_bv_ashr(args[0], args[1], result);
st = mk_bv_ashr(args[0], args[1], result);
break;
case OP_BSDIV:
SASSERT(num_args == 2);
return mk_bv_sdiv(args[0], args[1], result);
@ -151,13 +154,16 @@ br_status bv_rewriter::mk_app_core(func_decl * f, unsigned num_args, expr * cons
SASSERT(num_args == 2);
return mk_bv_smod_i(args[0], args[1], result);
case OP_CONCAT:
return mk_concat(num_args, args, result);
st = mk_concat(num_args, args, result);
break;
case OP_EXTRACT:
SASSERT(num_args == 1);
return mk_extract(m_util.get_extract_high(f), m_util.get_extract_low(f), args[0], result);
st = mk_extract(m_util.get_extract_high(f), m_util.get_extract_low(f), args[0], result);
break;
case OP_REPEAT:
SASSERT(num_args == 1);
return mk_repeat(f->get_parameter(0).get_int(), args[0], result);
st = mk_repeat(f->get_parameter(0).get_int(), args[0], result);
break;
case OP_ZERO_EXT:
SASSERT(num_args == 1);
return mk_zero_extend(f->get_parameter(0).get_int(), args[0], result);
@ -596,28 +602,45 @@ br_status bv_rewriter::mk_leq_core(bool is_signed, expr * a, expr * b, expr_ref
//
// a <=_u #x000f
//
unsigned bv_sz = m_util.get_bv_size(b);
unsigned i = bv_sz;
unsigned first_non_zero = UINT_MAX;
while (i > 0) {
--i;
if (!is_zero_bit(b, i)) {
first_non_zero = i;
break;
}
}
unsigned bv_sz = m_util.get_bv_size(a);
auto last_non_zero = [&](expr* x) {
for (unsigned i = bv_sz; i-- > 0; )
if (!is_zero_bit(x, i))
return i;
return UINT_MAX;
};
unsigned lnz = last_non_zero(b);
if (first_non_zero == UINT_MAX) {
if (lnz == UINT_MAX) {
// all bits are zero
result = m.mk_eq(a, mk_zero(bv_sz));
return BR_REWRITE1;
}
else if (first_non_zero < bv_sz - 1 && m_le2extract) {
result = m.mk_and(m.mk_eq(m_mk_extract(bv_sz - 1, first_non_zero + 1, a), mk_zero(bv_sz - first_non_zero - 1)),
m_util.mk_ule(m_mk_extract(first_non_zero, 0, a), m_mk_extract(first_non_zero, 0, b)));
else if (lnz < bv_sz - 1 && m_le2extract) {
// a[sz-1:lnz+1] = 0 & a[lnz:0] <= b[lnz:0]
result = m.mk_and(m.mk_eq(m_mk_extract(bv_sz - 1, lnz + 1, a), mk_zero(bv_sz - lnz - 1)),
m_util.mk_ule(m_mk_extract(lnz, 0, a), m_mk_extract(lnz, 0, b)));
return BR_REWRITE3;
}
lnz = last_non_zero(a);
if (lnz == UINT_MAX) {
// all bits are zero
result = m.mk_true();
return BR_DONE;
}
else if (lnz < bv_sz - 1 && m_le2extract) {
// use the equivalence to simplify:
// #x000f <=_u b <=> b[sz-1:lnz+1] != 0 or #xf <= b[lnz:0]
result = m.mk_implies(m.mk_eq(m_mk_extract(bv_sz - 1, lnz + 1, b), mk_zero(bv_sz - lnz - 1)),
m_util.mk_ule(m_mk_extract(lnz, 0, a), m_mk_extract(lnz, 0, b)));
return BR_REWRITE_FULL;
}
}
#endif
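The equivalence the guarded rewrite above relies on can be checked independently; a sketch (not part of this diff) for a constant whose last non-zero bit is at index 3:

```python
from z3 import BitVec, BitVecVal, Extract, ULE, And, prove

a = BitVec('a', 16)
b = BitVecVal(0x000F, 16)          # upper 12 bits are zero, last non-zero bit index 3
lhs = ULE(a, b)
rhs = And(Extract(15, 4, a) == 0,  # a[15:4] = 0
          ULE(Extract(3, 0, a), Extract(3, 0, b)))
prove(lhs == rhs)                  # expected: proved
```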
@ -1422,19 +1445,50 @@ br_status bv_rewriter::mk_bv_smod_core(expr * arg1, expr * arg2, bool hi_div0, e
br_status bv_rewriter::mk_int2bv(unsigned bv_size, expr * arg, expr_ref & result) {
numeral val;
bool is_int;
expr* x;
if (m_autil.is_numeral(arg, val, is_int)) {
val = m_util.norm(val, bv_size);
result = mk_numeral(val, bv_size);
return BR_DONE;
}
// (int2bv (bv2int x)) --> x
if (m_util.is_bv2int(arg) && bv_size == get_bv_size(to_app(arg)->get_arg(0))) {
result = to_app(arg)->get_arg(0);
// int2bv (bv2int x) --> x
if (m_util.is_bv2int(arg, x) && bv_size == get_bv_size(x)) {
result = x;
return BR_DONE;
}
// int2bv (bv2int x) --> 0000x
if (m_util.is_bv2int(arg, x) && bv_size > get_bv_size(x)) {
mk_zero_extend(bv_size - get_bv_size(x), x, result);
return BR_REWRITE1;
}
// int2bv (bv2int x) --> x[sz-1:0]
if (m_util.is_bv2int(arg, x) && bv_size < get_bv_size(x)) {
result = m_mk_extract(bv_size - 1, 0, x);
return BR_REWRITE1;
}
#if 0
// int2bv (a + b) --> int2bv(a) + int2bv(b)
if (m_autil.is_add(arg)) {
expr_ref_vector args(m);
for (expr* e : *to_app(arg))
args.push_back(m_util.mk_int2bv(bv_size, e));
result = m_util.mk_bv_add(args);
return BR_REWRITE3;
}
// int2bv (a * b) --> int2bv(a) * int2bv(b)
if (m_autil.is_mul(arg)) {
expr_ref_vector args(m);
for (expr* e : *to_app(arg))
args.push_back(m_util.mk_int2bv(bv_size, e));
result = m_util.mk_bv_mul(args);
return BR_REWRITE3;
}
#endif
return BR_FAILED;
}
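The new int2bv/bv2int reductions above can be observed from the Python API; a minimal sketch (not part of this diff), with the exact printed forms depending on downstream rewrites:

```python
from z3 import BitVec, BV2Int, Int2BV, simplify

x = BitVec('x', 8)
print(simplify(Int2BV(BV2Int(x), 8)))    # expected: x
print(simplify(Int2BV(BV2Int(x), 12)))   # expected: a zero-extension of x
print(simplify(Int2BV(BV2Int(x), 4)))    # expected: Extract(3, 0, x)
```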
@ -2717,6 +2771,27 @@ bool bv_rewriter::is_urem_any(expr * e, expr * & dividend, expr * & divisor) {
return true;
}
br_status bv_rewriter::mk_eq_bv2int(expr* lhs, expr* rhs, expr_ref& result) {
rational r;
expr* x, *y;
if (m_autil.is_numeral(lhs))
std::swap(lhs, rhs);
if (m_autil.is_numeral(rhs, r) && m_util.is_bv2int(lhs, x)) {
unsigned bv_size = m_util.get_bv_size(x);
if (0 <= r && r < rational::power_of_two(bv_size))
result = m.mk_eq(m_util.mk_numeral(r, bv_size), x);
else
result = m.mk_false();
return BR_REWRITE1;
}
if (m_util.is_bv2int(lhs, x) && m_util.is_bv2int(rhs, y)) {
result = m.mk_eq(x, y);
return BR_REWRITE1;
}
return BR_FAILED;
}
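mk_eq_bv2int above handles equalities between bv2int terms and numerals; a sketch of the intended effect (not part of this diff, assuming the simplifier routes through the new extended_bv_eq hook shown further below):

```python
from z3 import BitVec, BV2Int, simplify

x, y = BitVec('x', 8), BitVec('y', 8)
print(simplify(BV2Int(x) == 3))          # expected: an 8-bit equality between x and 3
print(simplify(BV2Int(x) == 300))        # expected: False, since 300 exceeds the 8-bit range
print(simplify(BV2Int(x) == BV2Int(y)))  # expected: x == y
```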
br_status bv_rewriter::mk_eq_core(expr * lhs, expr * rhs, expr_ref & result) {
if (lhs == rhs) {
result = m.mk_true();
@ -2760,6 +2835,7 @@ br_status bv_rewriter::mk_eq_core(expr * lhs, expr * rhs, expr_ref & result) {
return st;
}
if (m_blast_eq_value) {
st = mk_blast_eq_value(lhs, rhs, result);
if (st != BR_FAILED)

View file

@ -203,6 +203,7 @@ public:
bool is_urem_any(expr * e, expr * & dividend, expr * & divisor);
br_status mk_eq_core(expr * lhs, expr * rhs, expr_ref & result);
br_status mk_eq_bv2int(expr* lhs, expr* rhs, expr_ref& result);
br_status mk_ite_core(expr * c, expr * t, expr * e, expr_ref & result);
br_status mk_distinct(unsigned num_args, expr * const * args, expr_ref & result);

View file

@ -103,11 +103,10 @@ public:
m_first = false;
return *this;
}
bool operator==(const iterator& o) {
bool operator!=(const iterator& o) const {
SASSERT(&m_ouf == &o.m_ouf);
return m_first == o.m_first && m_curr_id == o.m_curr_id;
return m_first != o.m_first || m_curr_id != o.m_curr_id;
}
bool operator!=(const iterator& o) {return !(*this == o);}
};
iterator begin(OBJ*o) {
@ -152,11 +151,10 @@ public:
m_ouf.m_uf.is_root(m_rootnb) != true);
return *this;
}
bool operator==(const equiv_iterator& o) {
bool operator!=(const equiv_iterator& o) const {
SASSERT(&m_ouf == &o.m_ouf);
return m_rootnb == o.m_rootnb;
return m_rootnb != o.m_rootnb;
}
bool operator!=(const equiv_iterator& o) {return !(*this == o);}
};
equiv_iterator begin() {return equiv_iterator(*this, 0);}

View file

@ -74,6 +74,7 @@ bool simplify_inj_axiom(ast_manager & m, quantifier * q, expr_ref & result) {
}
}
if (found_vars && !has_free_vars(q)) {
(void)num_vars;
TRACE("inj_axiom",
tout << "Cadidate for simplification:\n" << mk_ll_pp(q, m) << mk_pp(app1, m) << "\n" << mk_pp(app2, m) << "\n" <<
mk_pp(var1, m) << "\n" << mk_pp(var2, m) << "\nnum_vars: " << num_vars << "\n";);

View file

@ -26,8 +26,6 @@ label_rewriter::label_rewriter(ast_manager & m) :
m_label_fid(m.get_label_family_id()),
m_rwr(m, false, *this) {}
label_rewriter::~label_rewriter() {}
br_status label_rewriter::reduce_app(
func_decl * f, unsigned num, expr * const * args, expr_ref & result,
proof_ref & result_pr) {

View file

@ -27,7 +27,6 @@ class label_rewriter : public default_rewriter_cfg {
rewriter_tpl<label_rewriter> m_rwr;
public:
label_rewriter(ast_manager & m);
~label_rewriter();
br_status reduce_app(func_decl * f, unsigned num, expr * const * args, expr_ref & result,
proof_ref & result_pr);

View file

@ -346,8 +346,6 @@ public:
ast_manager & m() const { return this->m_manager; }
Config & cfg() { return m_cfg; }
Config const & cfg() const { return m_cfg; }
~rewriter_tpl() override {};
void reset();
void cleanup();

View file

@ -2060,6 +2060,10 @@ br_status seq_rewriter::mk_seq_replace_all(expr* a, expr* b, expr* c, expr_ref&
result = m().mk_ite(str().mk_is_empty(b), str().mk_empty(a->get_sort()), c);
return BR_REWRITE2;
}
if (str().is_empty(a) && str().is_empty(c)) {
result = a;
return BR_DONE;
}
zstring s1, s2;
expr_ref_vector strs(m());
if (str().is_string(a, s1) && str().is_string(b, s2)) {

View file

@ -59,6 +59,7 @@ struct th_rewriter_cfg : public default_rewriter_cfg {
bv_util m_bv_util;
der m_der;
expr_safe_replace m_rep;
unused_vars_eliminator m_elim_unused_vars;
expr_ref_vector m_pinned;
// substitution support
expr_dependency_ref m_used_dependencies; // set of dependencies of used substitutions
@ -685,9 +686,18 @@ struct th_rewriter_cfg : public default_rewriter_cfg {
st = m_seq_rw.mk_eq_core(a, b, result);
if (st != BR_FAILED)
return st;
st = extended_bv_eq(a, b, result);
if (st != BR_FAILED)
return st;
return apply_tamagotchi(a, b, result);
}
br_status extended_bv_eq(expr* a, expr* b, expr_ref& result) {
if (m_bv_util.is_bv2int(a) || m_bv_util.is_bv2int(b))
return m_bv_rw.mk_eq_bv2int(a, b, result);
return BR_FAILED;
}
expr_ref mk_eq(expr* a, expr* b) {
expr_ref result(m());
br_status st = reduce_eq(a, b, result);
@ -820,8 +830,7 @@ struct th_rewriter_cfg : public default_rewriter_cfg {
}
}
SASSERT(old_q->get_sort() == q1->get_sort());
result = elim_unused_vars(m(), q1, params_ref());
result = m_elim_unused_vars(q1);
result_pr = nullptr;
@ -878,6 +887,7 @@ struct th_rewriter_cfg : public default_rewriter_cfg {
m_bv_util(m),
m_der(m),
m_rep(m),
m_elim_unused_vars(m, params_ref()),
m_pinned(m),
m_used_dependencies(m) {
updt_local_params(p);

View file

@ -52,6 +52,24 @@ expr_ref var_subst::operator()(expr * n, unsigned num_args, expr * const * args)
rep(n, result);
return result;
}
if (is_app(n) && all_of(*to_app(n), [&](expr* arg) { return is_ground(arg) || is_var(arg); })) {
ptr_buffer<expr> new_args;
for (auto arg : *to_app(n)) {
if (is_ground(arg))
new_args.push_back(arg);
else {
unsigned idx = to_var(arg)->get_idx();
expr* new_arg = nullptr;
if (idx < num_args)
new_arg = m_std_order ? args[num_args - idx - 1] : args[idx];
if (!new_arg)
new_arg = arg;
new_args.push_back(new_arg);
}
}
result = m.mk_app(to_app(n)->get_decl(), new_args.size(), new_args.data());
return result;
}
SASSERT(is_well_sorted(result.m(), n));
m_reducer.reset();
if (m_std_order)

View file

@ -94,7 +94,7 @@ class expr_free_vars {
ptr_vector<sort> m_sorts;
ptr_vector<expr> m_todo;
public:
expr_free_vars() {}
expr_free_vars() = default;
expr_free_vars(expr* e) { (*this)(e); }
void reset();
void operator()(expr* e);

View file

@ -446,7 +446,7 @@ public:
/*
Default constructor of invalid info.
*/
info() {}
info() = default;
/*
Used for constructing either an invalid info that is only used to indicate uninitialized entry, or valid but unknown info value.

View file

@ -187,8 +187,8 @@ expr_ref dominator_simplifier::simplify_and_or(bool is_and, app * e) {
}
expr_ref dominator_simplifier::simplify_not(app * e) {
expr *ee;
ENSURE(m.is_not(e, ee));
expr *ee = nullptr;
VERIFY(m.is_not(e, ee));
unsigned old_lvl = scope_level();
expr_ref t = simplify_rec(ee);
local_pop(scope_level() - old_lvl);

View file

@ -35,7 +35,7 @@ private:
mpz * m_as; // precise coefficients
double * m_approx_as; // approximated coefficients
var * m_xs; // var ids
linear_equation() {}
linear_equation() = default;
public:
unsigned size() const { return m_size; }
mpz const & a(unsigned idx) const { SASSERT(idx < m_size); return m_as[idx]; }

View file

@ -375,8 +375,6 @@ public:
m_bv(m)
{}
~reduce_args_simplifier() override {}
char const* name() const override { return "reduce-args"; }
void collect_statistics(statistics& st) const override {

View file

@ -28,10 +28,6 @@ bvsls_opt_engine::bvsls_opt_engine(ast_manager & m, params_ref const & p) :
m_best_model = alloc(model, m);
}
bvsls_opt_engine::~bvsls_opt_engine()
{
}
bvsls_opt_engine::optimization_result bvsls_opt_engine::optimize(
expr_ref const & objective,
model_ref initial_model,

View file

@ -31,7 +31,6 @@ class bvsls_opt_engine : public sls_engine {
public:
bvsls_opt_engine(ast_manager & m, params_ref const & p);
~bvsls_opt_engine();
class optimization_result {
public:

View file

@ -31,7 +31,7 @@ namespace sls {
unsigned nw = 0;
unsigned mask = 0;
bvect() {}
bvect() = default;
bvect(unsigned sz) : svector(sz, (unsigned)0) {}
void set_bw(unsigned bw);

View file

@ -318,6 +318,22 @@ UNARY_CMD(echo_cmd, "echo", "<string>", "display the given string", CPK_STRING,
else
ctx.regular_stream() << arg << std::endl;);
class set_initial_value_cmd : public cmd {
expr* m_var = nullptr, *m_value = nullptr;
public:
set_initial_value_cmd(): cmd("set-initial-value") {}
char const* get_usage() const override { return "<var> <value>"; }
char const* get_descr(cmd_context& ctx) const override { return "set an initial value for search as a hint to the solver"; }
unsigned get_arity() const override { return 2; }
void prepare(cmd_context& ctx) override { m_var = m_value = nullptr; }
cmd_arg_kind next_arg_kind(cmd_context& ctx) const override { return CPK_EXPR; }
void set_next_arg(cmd_context& ctx, expr* e) override { if (m_var) m_value = e; else m_var = e; }
void execute(cmd_context& ctx) override {
SASSERT(m_var && m_value);
ctx.set_initial_value(m_var, m_value);
}
};
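Given the usage string above, an SMT-LIB2 script can presumably write, e.g., (set-initial-value x 42) after declaring x and before (check-sat); both arguments are parsed as expressions and forwarded to cmd_context::set_initial_value.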
class set_get_option_cmd : public cmd {
protected:
symbol m_true;
@ -893,6 +909,7 @@ void install_basic_cmds(cmd_context & ctx) {
ctx.insert(alloc(get_option_cmd));
ctx.insert(alloc(get_info_cmd));
ctx.insert(alloc(set_info_cmd));
ctx.insert(alloc(set_initial_value_cmd));
ctx.insert(alloc(get_consequences_cmd));
ctx.insert(alloc(builtin_cmd, "assert", "<term>", "assert term."));
ctx.insert(alloc(builtin_cmd, "check-sat", "<boolean-constants>*", "check if the current context is satisfiable. If a list of boolean constants B is provided, then check if the current context is consistent with assigning every constant in B to true."));

Some files were not shown because too many files have changed in this diff.