diff --git a/.github/workflows/android-build.yml b/.github/workflows/android-build.yml index dcc40db0e..c2ea7c860 100644 --- a/.github/workflows/android-build.yml +++ b/.github/workflows/android-build.yml @@ -32,7 +32,7 @@ jobs: tar -cvf z3-build-${{ matrix.android-abi }}.tar *.jar *.so - name: Archive production artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: android-build-${{ matrix.android-abi }} path: build/z3-build-${{ matrix.android-abi }}.tar diff --git a/.github/workflows/ask.lock.yml b/.github/workflows/ask.lock.yml index c4425a643..19f9a99f2 100644 --- a/.github/workflows/ask.lock.yml +++ b/.github/workflows/ask.lock.yml @@ -1223,7 +1223,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw_info.json path: /tmp/aw_info.json @@ -1329,7 +1329,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -2277,7 +2277,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2814,7 +2814,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: question-answering-researcher.log path: /tmp/question-answering-researcher.log diff --git a/.github/workflows/ci-doctor.lock.yml b/.github/workflows/ci-doctor.lock.yml index c75fd661c..903da1c30 100644 --- a/.github/workflows/ci-doctor.lock.yml +++ b/.github/workflows/ci-doctor.lock.yml @@ -808,7 +808,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: 
name: aw_info.json path: /tmp/aw_info.json @@ -911,7 +911,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -1859,7 +1859,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2396,7 +2396,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: ci-failure-doctor.log path: /tmp/ci-failure-doctor.log diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 2aeb7d286..2c02dabf2 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -89,13 +89,13 @@ jobs: id: date run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@v5 with: name: coverage-${{steps.date.outputs.date}} path: ${{github.workspace}}/coverage.html retention-days: 4 - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@v5 with: name: coverage-details-${{steps.date.outputs.date}} path: ${{env.COV_DETAILS_PATH}} diff --git a/.github/workflows/daily-backlog-burner.lock.yml b/.github/workflows/daily-backlog-burner.lock.yml index 355ca9a78..5dfd11104 100644 --- a/.github/workflows/daily-backlog-burner.lock.yml +++ b/.github/workflows/daily-backlog-burner.lock.yml @@ -747,7 +747,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw_info.json path: /tmp/aw_info.json @@ -856,7 +856,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: 
actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -1804,7 +1804,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2341,7 +2341,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: daily-backlog-burner.log path: /tmp/daily-backlog-burner.log @@ -2435,7 +2435,7 @@ jobs: fi - name: Upload git patch if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw.patch path: /tmp/aw.patch @@ -2946,7 +2946,7 @@ jobs: steps: - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: aw.patch path: /tmp/ diff --git a/.github/workflows/daily-perf-improver.lock.yml b/.github/workflows/daily-perf-improver.lock.yml index 41448b626..266ef1b2e 100644 --- a/.github/workflows/daily-perf-improver.lock.yml +++ b/.github/workflows/daily-perf-improver.lock.yml @@ -822,7 +822,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw_info.json path: /tmp/aw_info.json @@ -931,7 +931,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -1879,7 +1879,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2416,7 +2416,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: 
actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: daily-perf-improver.log path: /tmp/daily-perf-improver.log @@ -2510,7 +2510,7 @@ jobs: fi - name: Upload git patch if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw.patch path: /tmp/aw.patch @@ -3021,7 +3021,7 @@ jobs: steps: - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: aw.patch path: /tmp/ diff --git a/.github/workflows/daily-test-improver.lock.yml b/.github/workflows/daily-test-improver.lock.yml index e001ab7df..8c7acc85d 100644 --- a/.github/workflows/daily-test-improver.lock.yml +++ b/.github/workflows/daily-test-improver.lock.yml @@ -797,7 +797,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw_info.json path: /tmp/aw_info.json @@ -906,7 +906,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -1854,7 +1854,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2391,7 +2391,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: daily-test-coverage-improver.log path: /tmp/daily-test-coverage-improver.log @@ -2485,7 +2485,7 @@ jobs: fi - name: Upload git patch if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw.patch path: /tmp/aw.patch @@ -2996,7 +2996,7 @@ jobs: steps: - name: Download patch artifact continue-on-error: true - uses: 
actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: aw.patch path: /tmp/ diff --git a/.github/workflows/pr-fix.lock.yml b/.github/workflows/pr-fix.lock.yml index 87e8b10c9..2e2679e64 100644 --- a/.github/workflows/pr-fix.lock.yml +++ b/.github/workflows/pr-fix.lock.yml @@ -1251,7 +1251,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw_info.json path: /tmp/aw_info.json @@ -1360,7 +1360,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -2308,7 +2308,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2845,7 +2845,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: pr-fix.log path: /tmp/pr-fix.log @@ -2939,7 +2939,7 @@ jobs: fi - name: Upload git patch if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw.patch path: /tmp/aw.patch @@ -3371,7 +3371,7 @@ jobs: steps: - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: aw.patch path: /tmp/ diff --git a/MODULE.bazel b/MODULE.bazel index 48848d27e..985a66b8e 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -1,6 +1,6 @@ module( name = "z3", - version = "4.15.4", # TODO: Read from VERSION.txt - currently manual sync required + version = "4.15.5", # TODO: Read from VERSION.txt - currently manual sync required bazel_compatibility = [">=7.0.0"], ) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 
78c5cddbf..1efabaea6 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -7,6 +7,18 @@ Version 4.next - CDCL core for SMT queries. It extends the SAT engine with theory solver plugins. - add global incremental pre-processing for the legacy core. +Version 4.15.4 +============== +- Add methods to create polymorphic datatype constructors over the API. The prior method was that users had to manage + parametricity using their own generation of instances. The updated API allows working with polymorphic datatype declarations + directly. +- MSVC build by default respects security flags, https://github.com/Z3Prover/z3/pull/7988 +- Using a new algorithm for smt.threads=k, k > 1 using a shared search tree. Thanks to Ilana Shapiro. +- Thanks for several pull requests improving usability, including + - https://github.com/Z3Prover/z3/pull/7955 + - https://github.com/Z3Prover/z3/pull/7995 + - https://github.com/Z3Prover/z3/pull/7947 + Version 4.15.3 ============== - Add UserPropagator callback option for quantifier instantiations. 
It allows the user propagator to diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 0bf2aef61..520d1d172 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -49,7 +49,8 @@ jobs: timeoutInMinutes: 90 pool: vmImage: "ubuntu-latest" - container: "quay.io/pypa/manylinux2014_x86_64:latest" + container: "quay.io/pypa/manylinux_2_34_x86_64:latest" + condition: eq(1,1) steps: - script: "/opt/python/cp38-cp38/bin/python -m venv $PWD/env" - script: 'echo "##vso[task.prependpath]$PWD/env/bin"' @@ -66,7 +67,6 @@ jobs: pool: vmImage: "ubuntu-latest" container: "quay.io/pypa/manylinux2014_x86_64:latest" - condition: eq(0,1) steps: - script: curl -L -o /tmp/arm-toolchain.tar.xz 'https://developer.arm.com/-/media/Files/downloads/gnu/11.2-2022.02/binrel/gcc-arm-11.2-2022.02-x86_64-aarch64-none-linux-gnu.tar.xz?rev=33c6e30e5ac64e6dba8f0431f2c35f1b&hash=9918A05BF47621B632C7A5C8D2BB438FB80A4480' - script: mkdir -p /tmp/arm-toolchain/ diff --git a/examples/c++/example.cpp b/examples/c++/example.cpp index c3902dfff..2bb5510e4 100644 --- a/examples/c++/example.cpp +++ b/examples/c++/example.cpp @@ -1024,14 +1024,17 @@ void polymorphic_datatype_example() { symbol is_pair_name = ctx.str_symbol("is-pair"); symbol first_name = ctx.str_symbol("first"); symbol second_name = ctx.str_symbol("second"); - + symbol field_names[2] = {first_name, second_name}; - sort field_sorts[2] = {alpha, beta}; // Use type variables + sort _field_sorts[2] = {alpha, beta}; + sort_vector field_sorts(ctx); + field_sorts.push_back(alpha); // Use type variables + field_sorts.push_back(beta); // Use type variables constructors cs(ctx); - cs.add(mk_pair_name, is_pair_name, 2, field_names, field_sorts); - sort pair = ctx.datatype(pair_name, cs); - + cs.add(mk_pair_name, is_pair_name, 2, field_names, _field_sorts); + sort pair = ctx.datatype(pair_name, field_sorts, cs); + std::cout << "Created parametric datatype: " << pair << "\n"; // Instantiate Pair with concrete types: (Pair Int Real) diff --git 
a/param_sweep_deterministic.sh b/param_sweep_deterministic.sh new file mode 100755 index 000000000..2fe659431 --- /dev/null +++ b/param_sweep_deterministic.sh @@ -0,0 +1,94 @@ +#!/usr/bin/env bash +set -euo pipefail + +# --- Inputs --- +FILE=$(realpath "$1") +Z3=$(realpath "$2") +N="${3:-99999999}" # optional limit on number of tests + +TIMEOUT=100 +OUT="../sweep_results.csv" + +echo "id,params,result,time_s" > "$OUT" + +# --- Tunable parameters --- +BOOL_PARAMS=( + "smt.arith.nl.branching" + "smt.arith.nl.cross_nested" + "smt.arith.nl.expensive_patching" + "smt.arith.nl.gb" + "smt.arith.nl.horner" + "smt.arith.nl.optimize_bounds" + "smt.arith.nl.propagate_linear_monomials" + "smt.arith.nl.tangents" +) + +INT_PARAM1_KEY="smt.arith.nl.delay" +INT_PARAM1_LO=5 +INT_PARAM1_HI=10 + +INT_PARAM2_KEY="smt.arith.nl.horner_frequency" +INT_PARAM2_LO=2 +INT_PARAM2_HI=6 + +id=1 + +# --- Deterministic nested loops over all parameter values --- +for b0 in 0 1; do +for b1 in 0 1; do +for b2 in 0 1; do +for b3 in 0 1; do +for b4 in 0 1; do +for b5 in 0 1; do +for b6 in 0 1; do +for b7 in 0 1; do + + for d in $(seq "$INT_PARAM1_LO" "$INT_PARAM1_HI"); do + for hf in $(seq "$INT_PARAM2_LO" "$INT_PARAM2_HI"); do + + # stop early if N reached + if (( id > N )); then + echo "✓ Done early at $((id-1)) combinations. 
Results in $OUT" + exit 0 + fi + + BOOLS=($b0 $b1 $b2 $b3 $b4 $b5 $b6 $b7) + + PARAMS=() + for idx in "${!BOOL_PARAMS[@]}"; do + PARAMS+=("${BOOL_PARAMS[$idx]}=${BOOLS[$idx]}") + done + PARAMS+=("$INT_PARAM1_KEY=$d") + PARAMS+=("$INT_PARAM2_KEY=$hf") + + PARAM_STR=$(IFS=, ; echo "${PARAMS[*]}") + + printf "[%05d] %s\n" "$id" "$PARAM_STR" + + # ----- Run Z3 ----- + START=$(date +%s%N) + if timeout "$TIMEOUT" "$Z3" \ + -v:1 -st \ + "$FILE" \ + smt.threads=2 \ + tactic.default_tactic=smt \ + smt_parallel.param_tuning=false \ + smt_parallel.tunable_params="$PARAM_STR" \ + >/tmp/z3_out.txt 2>&1 + then + RESULT=$(grep -E "sat|unsat|unknown" /tmp/z3_out.txt | tail -1) + [[ -z "$RESULT" ]] && RESULT="unknown" + else + RESULT="timeout" + fi + END=$(date +%s%N) + TIME=$(awk "BEGIN{print ($END-$START)/1e9}") + + echo "$id,\"$PARAM_STR\",$RESULT,$TIME" >> "$OUT" + + ((id++)) + + done # hf + done # d + +done; done; done; done; done; done; done; done diff --git a/param_sweep_random.sh b/param_sweep_random.sh new file mode 100755 index 000000000..3ead8019c --- /dev/null +++ b/param_sweep_random.sh @@ -0,0 +1,96 @@ +#!/usr/bin/env bash +set -euo pipefail + +# --- Inputs --- +FILE=$(realpath "$1") +Z3=$(realpath "$2") +N="${3:-2}" + +TIMEOUT=100 +OUT="../sweep_results.csv" + +echo "id,params,result,time_s" > "$OUT" + +# --- Tunable parameters --- +BOOL_PARAMS=( + "smt.arith.nl.branching" + "smt.arith.nl.cross_nested" + "smt.arith.nl.expensive_patching" + "smt.arith.nl.gb" + "smt.arith.nl.horner" + "smt.arith.nl.optimize_bounds" + "smt.arith.nl.propagate_linear_monomials" + "smt.arith.nl.tangents" +) + +INT_PARAMS=( + "smt.arith.nl.delay:5:10" + "smt.arith.nl.horner_frequency:2:6" +) + +# --- Helpers --- +random_bool() { echo $((RANDOM % 2)); } + +random_int() { + local lo=$1 + local hi=$2 + echo $((lo + RANDOM % (hi - lo + 1))) +} + +# --- Track used parameter combinations --- +declare -A SEEN + +i=1 +while (( i <= N )); do + + # ----- generate a unique parameter string ----- + 
while true; do + PARAMS=() + + for p in "${BOOL_PARAMS[@]}"; do + PARAMS+=("$p=$(random_bool)") + done + + for spec in "${INT_PARAMS[@]}"; do + IFS=':' read -r key lo hi <<<"$spec" + PARAMS+=("$key=$(random_int "$lo" "$hi")") + done + + PARAM_STR=$(IFS=, ; echo "${PARAMS[*]}") + + # Check uniqueness + if [[ -z "${SEEN[$PARAM_STR]+x}" ]]; then + SEEN["$PARAM_STR"]=1 + break + fi + done + + printf "[%03d/%03d] %s\n" "$i" "$N" "$PARAM_STR" + + # ----- run Z3 ----- + START=$(date +%s%N) + + if timeout "$TIMEOUT" "$Z3" \ + -v:1 -st \ + "$FILE" \ + smt.threads=2 \ + tactic.default_tactic=smt \ + smt_parallel.param_tuning=false \ + smt_parallel.tunable_params="$PARAM_STR" \ + >/tmp/z3_out.txt 2>&1 + then + RESULT=$(grep -E "sat|unsat|unknown" /tmp/z3_out.txt | tail -1) + [[ -z "$RESULT" ]] && RESULT="unknown" + else + RESULT="timeout" + fi + + END=$(date +%s%N) + TIME=$(awk "BEGIN{print ($END-$START)/1e9}") + + echo "$i,\"$PARAM_STR\",$RESULT,$TIME" >> "$OUT" + + ((i++)) +done + +echo "✓ Done. 
Results written to $OUT" diff --git a/scripts/VERSION.txt b/scripts/VERSION.txt index 6baf7570c..79c398614 100644 --- a/scripts/VERSION.txt +++ b/scripts/VERSION.txt @@ -1 +1 @@ -4.15.4.0 +4.15.5.0 diff --git a/scripts/nightly.yaml b/scripts/nightly.yaml index 6618a301c..a86e6536d 100644 --- a/scripts/nightly.yaml +++ b/scripts/nightly.yaml @@ -2,7 +2,7 @@ variables: # Version components read from VERSION.txt (updated manually when VERSION.txt changes) Major: '4' Minor: '15' - Patch: '4' + Patch: '5' ReleaseVersion: $(Major).$(Minor).$(Patch) NightlyVersion: $(Major).$(Minor).$(Patch).$(Build.BuildId) # TODO: Auto-read from VERSION.txt when Azure DevOps supports it better @@ -365,17 +365,17 @@ stages: inputs: artifactName: 'WindowsBuild-x86' targetPath: $(Agent.TempDirectory) - - task: DownloadPipelineArtifact@2 - displayName: 'Download ManyLinux Build' - inputs: - artifactName: 'ManyLinuxPythonBuildAMD64' - targetPath: $(Agent.TempDirectory) +# - task: DownloadPipelineArtifact@2 +# displayName: 'Download ManyLinux Build' +# inputs: +# artifactName: 'ManyLinuxPythonBuildAMD64' +# targetPath: $(Agent.TempDirectory) - task: DownloadPipelineArtifact@2 displayName: 'Download ManyLinux Arm64 Build' inputs: artifactName: 'ManyLinuxPythonBuildArm64' targetPath: $(Agent.TempDirectory) - - script: cd $(Agent.TempDirectory); mkdir osx-x64-bin; cd osx-x64-bin; unzip ../*x64-osx*.zip +# - script: cd $(Agent.TempDirectory); mkdir osx-x64-bin; cd osx-x64-bin; unzip ../*x64-osx*.zip - script: cd $(Agent.TempDirectory); mkdir osx-arm64-bin; cd osx-arm64-bin; unzip ../*arm64-osx*.zip # - script: cd $(Agent.TempDirectory); mkdir musl-bin; cd musl-bin; unzip ../*-linux.zip - script: cd $(Agent.TempDirectory); mkdir win32-bin; cd win32-bin; unzip ../*x86-win*.zip diff --git a/scripts/release.yml b/scripts/release.yml index 506295525..5c88c89ae 100644 --- a/scripts/release.yml +++ b/scripts/release.yml @@ -6,7 +6,7 @@ trigger: none variables: - ReleaseVersion: '4.15.4' # TODO: 
Auto-read from VERSION.txt when Azure DevOps supports it better + ReleaseVersion: '4.15.5' # TODO: Auto-read from VERSION.txt when Azure DevOps supports it better stages: @@ -476,7 +476,7 @@ stages: - job: NuGetPublish - condition: eq(1,1) + condition: eq(0,1) displayName: "Publish to NuGet.org" steps: - task: DownloadPipelineArtifact@2 diff --git a/src/api/api_seq.cpp b/src/api/api_seq.cpp index 2b87ef290..cf199af41 100644 --- a/src/api/api_seq.cpp +++ b/src/api/api_seq.cpp @@ -293,6 +293,9 @@ extern "C" { MK_TERNARY(Z3_mk_seq_extract, mk_c(c)->get_seq_fid(), OP_SEQ_EXTRACT, SKIP); MK_TERNARY(Z3_mk_seq_replace, mk_c(c)->get_seq_fid(), OP_SEQ_REPLACE, SKIP); + MK_TERNARY(Z3_mk_seq_replace_all, mk_c(c)->get_seq_fid(), OP_SEQ_REPLACE_ALL, SKIP); + MK_TERNARY(Z3_mk_seq_replace_re, mk_c(c)->get_seq_fid(), OP_SEQ_REPLACE_RE, SKIP); + MK_TERNARY(Z3_mk_seq_replace_re_all, mk_c(c)->get_seq_fid(), OP_SEQ_REPLACE_RE_ALL, SKIP); MK_BINARY(Z3_mk_seq_at, mk_c(c)->get_seq_fid(), OP_SEQ_AT, SKIP); MK_BINARY(Z3_mk_seq_nth, mk_c(c)->get_seq_fid(), OP_SEQ_NTH, SKIP); MK_UNARY(Z3_mk_seq_length, mk_c(c)->get_seq_fid(), OP_SEQ_LENGTH, SKIP); diff --git a/src/api/c++/z3++.h b/src/api/c++/z3++.h index 71f3ff79b..2acb010cb 100644 --- a/src/api/c++/z3++.h +++ b/src/api/c++/z3++.h @@ -327,6 +327,15 @@ namespace z3 { */ sort datatype(symbol const& name, constructors const& cs); + /** + \brief Create a parametric recursive datatype. + \c name is the name of the recursive datatype + \c params - the sort parameters of the datatype + \c cs - the \c n constructors used to define the datatype + References to the datatype and mutually recursive datatypes can be created using \ref datatype_sort. + */ + sort datatype(symbol const &name, sort_vector const ¶ms, constructors const &cs); + /** \brief Create a set of mutually recursive datatypes. 
\c n - number of recursive datatypes @@ -3616,6 +3625,16 @@ namespace z3 { return sort(*this, s); } + inline sort context::datatype(symbol const &name, sort_vector const& params, constructors const &cs) { + array _params(params); + array _cs(cs.size()); + for (unsigned i = 0; i < cs.size(); ++i) + _cs[i] = cs[i]; + Z3_sort s = Z3_mk_polymorphic_datatype(*this, name, _params.size(), _params.ptr(), cs.size(), _cs.ptr()); + check_error(); + return sort(*this, s); + } + inline sort_vector context::datatypes( unsigned n, symbol const* names, constructor_list *const* cons) { diff --git a/src/api/java/Context.java b/src/api/java/Context.java index 691ecd737..9a8218537 100644 --- a/src/api/java/Context.java +++ b/src/api/java/Context.java @@ -2226,6 +2226,15 @@ public class Context implements AutoCloseable { return (IntExpr)Expr.create(this, Native.mkSeqIndex(nCtx(), s.getNativeObject(), substr.getNativeObject(), offset.getNativeObject())); } + /** + * Extract the last index of sub-string. + */ + public final IntExpr mkLastIndexOf(Expr> s, Expr> substr) + { + checkContextMatch(s, substr); + return (IntExpr)Expr.create(this, Native.mkSeqLastIndex(nCtx(), s.getNativeObject(), substr.getNativeObject())); + } + /** * Replace the first occurrence of src by dst in s. */ @@ -2235,6 +2244,33 @@ public class Context implements AutoCloseable { return (SeqExpr) Expr.create(this, Native.mkSeqReplace(nCtx(), s.getNativeObject(), src.getNativeObject(), dst.getNativeObject())); } + /** + * Replace all occurrences of src by dst in s. + */ + public final SeqExpr mkReplaceAll(Expr> s, Expr> src, Expr> dst) + { + checkContextMatch(s, src, dst); + return (SeqExpr) Expr.create(this, Native.mkSeqReplaceAll(nCtx(), s.getNativeObject(), src.getNativeObject(), dst.getNativeObject())); + } + + /** + * Replace the first occurrence of regular expression re with dst in s. 
+ */ + public final SeqExpr mkReplaceRe(Expr> s, ReExpr> re, Expr> dst) + { + checkContextMatch(s, re, dst); + return (SeqExpr) Expr.create(this, Native.mkSeqReplaceRe(nCtx(), s.getNativeObject(), re.getNativeObject(), dst.getNativeObject())); + } + + /** + * Replace all occurrences of regular expression re with dst in s. + */ + public final SeqExpr mkReplaceReAll(Expr> s, ReExpr> re, Expr> dst) + { + checkContextMatch(s, re, dst); + return (SeqExpr) Expr.create(this, Native.mkSeqReplaceReAll(nCtx(), s.getNativeObject(), re.getNativeObject(), dst.getNativeObject())); + } + /** * Convert a regular expression that accepts sequence s. */ diff --git a/src/api/java/Sort.java b/src/api/java/Sort.java index f612b9031..4910338f3 100644 --- a/src/api/java/Sort.java +++ b/src/api/java/Sort.java @@ -144,6 +144,8 @@ public class Sort extends AST return new SeqSort<>(ctx, obj); case Z3_RE_SORT: return new ReSort<>(ctx, obj); + case Z3_CHAR_SORT: + return new CharSort(ctx, obj); default: throw new Z3Exception("Unknown sort kind"); } diff --git a/src/api/z3_api.h b/src/api/z3_api.h index baa2fa34c..c5d3933ca 100644 --- a/src/api/z3_api.h +++ b/src/api/z3_api.h @@ -3800,6 +3800,27 @@ extern "C" { */ Z3_ast Z3_API Z3_mk_seq_replace(Z3_context c, Z3_ast s, Z3_ast src, Z3_ast dst); + /** + \brief Replace all occurrences of \c src with \c dst in \c s. + + def_API('Z3_mk_seq_replace_all', AST ,(_in(CONTEXT), _in(AST), _in(AST), _in(AST))) + */ + Z3_ast Z3_API Z3_mk_seq_replace_all(Z3_context c, Z3_ast s, Z3_ast src, Z3_ast dst); + + /** + \brief Replace the first occurrence of regular expression \c re with \c dst in \c s. + + def_API('Z3_mk_seq_replace_re', AST ,(_in(CONTEXT), _in(AST), _in(AST), _in(AST))) + */ + Z3_ast Z3_API Z3_mk_seq_replace_re(Z3_context c, Z3_ast s, Z3_ast re, Z3_ast dst); + + /** + \brief Replace all occurrences of regular expression \c re with \c dst in \c s. 
+ + def_API('Z3_mk_seq_replace_re_all', AST ,(_in(CONTEXT), _in(AST), _in(AST), _in(AST))) + */ + Z3_ast Z3_API Z3_mk_seq_replace_re_all(Z3_context c, Z3_ast s, Z3_ast re, Z3_ast dst); + /** \brief Retrieve from \c s the unit sequence positioned at position \c index. The sequence is empty if the index is out of bounds. diff --git a/src/ast/datatype_decl_plugin.cpp b/src/ast/datatype_decl_plugin.cpp index 5bb918c5f..d0c74bd50 100644 --- a/src/ast/datatype_decl_plugin.cpp +++ b/src/ast/datatype_decl_plugin.cpp @@ -300,18 +300,12 @@ namespace datatype { TRACE(datatype, tout << "expected sort parameter at position " << i << " got: " << s << "\n";); throw invalid_datatype(); } - // Allow type variables as parameters for polymorphic datatypes - sort* param_sort = to_sort(s.get_ast()); - if (!m_manager->is_type_var(param_sort) && param_sort->get_family_id() == null_family_id) { - // Type variables and concrete sorts are allowed, but not other uninterpreted sorts - // Actually, all sorts should be allowed including uninterpreted ones - } } sort* s = m_manager->mk_sort(name.get_symbol(), sort_info(m_family_id, k, num_parameters, parameters, true)); def* d = nullptr; - if (m_defs.find(s->get_name(), d) && d->sort_size()) { + if (m_defs.find(s->get_name(), d) && d->sort_size() && d->params().size() == num_parameters - 1) { obj_map S; for (unsigned i = 0; i + 1 < num_parameters; ++i) { sort* r = to_sort(parameters[i + 1].get_ast()); diff --git a/src/ast/euf/ho_matcher.h b/src/ast/euf/ho_matcher.h index 007bdea2c..65477078c 100644 --- a/src/ast/euf/ho_matcher.h +++ b/src/ast/euf/ho_matcher.h @@ -100,14 +100,14 @@ namespace euf { class match_goals { protected: - ast_manager &m; ho_matcher& ho; + ast_manager &m; match_goal* m_expensive = nullptr, *m_cheap = nullptr; match_goal* pop(match_goal*& q); public: - match_goals(ho_matcher& em, ast_manager &m) : m(m), ho(em) {} + match_goals(ho_matcher& em, ast_manager& m) : ho(em), m(m) {} bool empty() const { return m_cheap == nullptr 
&& m_expensive == nullptr; } void reset() { m_cheap = m_expensive = nullptr; } void push(unsigned level, unsigned offset, expr_ref const& pat, expr_ref const& t); @@ -158,7 +158,6 @@ namespace euf { }; class unitary_patterns { - ast_manager& m; array_util a; vector m_patterns; vector> m_is_unitary; @@ -181,7 +180,7 @@ namespace euf { } public: - unitary_patterns(ast_manager& m) : m(m), a(m) {} + unitary_patterns(ast_manager& m) : a(m) {} bool is_unitary(unsigned offset, expr* p) const { return find(offset, p) == l_true; diff --git a/src/ast/simplifiers/bound_simplifier.cpp b/src/ast/simplifiers/bound_simplifier.cpp index b5e927961..3ae3a1a01 100644 --- a/src/ast/simplifiers/bound_simplifier.cpp +++ b/src/ast/simplifiers/bound_simplifier.cpp @@ -135,7 +135,12 @@ bool bound_simplifier::reduce_arg(expr* arg, expr_ref& result) { } void bound_simplifier::reduce() { - + + #if 0 + smt_params_helper sp(p); + if (!sp.bound_simplifier()) + return; + #endif bool updated = true, found_bound = false; for (unsigned i = 0; i < 5 && updated; ++i) { updated = false; diff --git a/src/nlsat/nlsat_solver.cpp b/src/nlsat/nlsat_solver.cpp index bad981011..084e3a479 100644 --- a/src/nlsat/nlsat_solver.cpp +++ b/src/nlsat/nlsat_solver.cpp @@ -1116,7 +1116,7 @@ namespace nlsat { void log_lemma(std::ostream& out, unsigned n, literal const* cls, bool is_valid) { ++m_lemma_count; - out << "(set-logic NRA)\n"; + out << "(set-logic ALL)\n"; if (is_valid) { display_smt2_bool_decls(out); display_smt2_arith_decls(out); diff --git a/src/params/smt_parallel_params.pyg b/src/params/smt_parallel_params.pyg index 9a81f6c25..d1e2e567d 100644 --- a/src/params/smt_parallel_params.pyg +++ b/src/params/smt_parallel_params.pyg @@ -21,5 +21,8 @@ def_module_params('smt_parallel', ('cubetree', BOOL, False, 'use cube tree data structure for storing cubes'), ('searchtree', BOOL, False, 'use search tree implementation (parallel2)'), ('inprocessing', BOOL, False, 'integrate in-processing as a heuristic 
simplification'), - ('inprocessing_delay', UINT, 0, 'number of undef before invoking simplification') + ('inprocessing_delay', UINT, 0, 'number of undef before invoking simplification'), + ('param_tuning', BOOL, False, 'whether to tune params online during solving'), + ('enable_parallel_smt', BOOL, True, 'whether to run the parallel solver (set to FALSE to test param tuning only)'), + ('tunable_params', STRING, '', 'comma-separated key=value list for online param tuning, e.g. \"smt.arith.nl.horner=false,smt.arith.nl.delay=8\"') )) \ No newline at end of file diff --git a/src/smt/smt_parallel.cpp b/src/smt/smt_parallel.cpp index ee2d06cdb..12bd54786 100644 --- a/src/smt/smt_parallel.cpp +++ b/src/smt/smt_parallel.cpp @@ -148,7 +148,8 @@ namespace smt { m_param_state = best_param_state; auto p = apply_param_values(m_param_state); b.set_param_state(p); - IF_VERBOSE(1, verbose_stream() << " PARAM TUNER found better param state\n"); + IF_VERBOSE(1, verbose_stream() << " PARAM TUNER found better param state:\n"); + print_param_values(m_param_state); } else { IF_VERBOSE(1, verbose_stream() << " PARAM TUNER retained current param state\n"); @@ -163,11 +164,9 @@ namespace smt { m_param_state.push_back({symbol("smt.arith.nl.expensive_patching"), smtp.arith_nl_expensive_patching()}); m_param_state.push_back({symbol("smt.arith.nl.gb"), smtp.arith_nl_grobner()}); m_param_state.push_back({symbol("smt.arith.nl.horner"), smtp.arith_nl_horner()}); - m_param_state.push_back({symbol("smt.arith.nl.horner_frequency"), unsigned_value({smtp.arith_nl_horner_frequency(), 2, 6}) - }); + m_param_state.push_back({symbol("smt.arith.nl.horner_frequency"), unsigned_value({smtp.arith_nl_horner_frequency(), 2, 6})}); m_param_state.push_back({symbol("smt.arith.nl.optimize_bounds"), smtp.arith_nl_optimize_bounds()}); - m_param_state.push_back( - {symbol("smt.arith.nl.propagate_linear_monomials"), smtp.arith_nl_propagate_linear_monomials()}); + 
m_param_state.push_back({symbol("smt.arith.nl.propagate_linear_monomials"), smtp.arith_nl_propagate_linear_monomials()}); m_param_state.push_back({symbol("smt.arith.nl.tangents"), smtp.arith_nl_tangents()}); }; @@ -217,33 +216,35 @@ namespace smt { } void parallel::param_generator::protocol_iteration() { - IF_VERBOSE(1, verbose_stream() << " PARAM TUNER running protocol iteration\n"); - - ctx->get_fparams().m_max_conflicts = m_max_prefix_conflicts; - lbool r = run_prefix_step(); + while (!m.limit().is_canceled()) { + IF_VERBOSE(1, verbose_stream() << " PARAM TUNER running protocol iteration\n"); + + ctx->get_fparams().m_max_conflicts = m_max_prefix_conflicts; + lbool r = run_prefix_step(); - if (m.limit().is_canceled()) - return; + if (m.limit().is_canceled()) + return; - switch (r) { - case l_undef: { - replay_proof_prefixes(); - break; - } - case l_true: { - IF_VERBOSE(1, verbose_stream() << " PARAM TUNER found formula sat\n"); - model_ref mdl; - ctx->get_model(mdl); - b.set_sat(m_l2g, *mdl); - break; - } - case l_false: { - expr_ref_vector const &unsat_core = ctx->unsat_core(); - IF_VERBOSE(2, verbose_stream() << " unsat core:\n"; - for (auto c : unsat_core) verbose_stream() << mk_bounded_pp(c, m, 3) << "\n"); - IF_VERBOSE(1, verbose_stream() << " PARAM TUNER determined formula unsat\n"); - b.set_unsat(m_l2g, unsat_core); - break; + switch (r) { + case l_undef: { + replay_proof_prefixes(); + break; + } + case l_true: { + IF_VERBOSE(1, verbose_stream() << " PARAM TUNER found formula sat\n"); + model_ref mdl; + ctx->get_model(mdl); + b.set_sat(m_l2g, *mdl); + break; + } + case l_false: { + expr_ref_vector const &unsat_core = ctx->unsat_core(); + IF_VERBOSE(2, verbose_stream() << " unsat core:\n"; + for (auto c : unsat_core) verbose_stream() << mk_bounded_pp(c, m, 3) << "\n"); + IF_VERBOSE(1, verbose_stream() << " PARAM TUNER determined formula unsat\n"); + b.set_unsat(m_l2g, unsat_core); + break; + } } } } @@ -698,27 +699,87 @@ namespace smt { 
m_batch_manager.initialize(); m_workers.reset(); + + smt_parallel_params pp(ctx.m_params); + m_should_tune_params = pp.param_tuning(); + m_should_run_parallel = pp.enable_parallel_smt(); scoped_limits sl(m.limit()); flet _nt(ctx.m_fparams.m_threads, 1); - m_param_generator = alloc(param_generator, *this); - SASSERT(num_threads > 1); - for (unsigned i = 0; i < num_threads; ++i) - m_workers.push_back(alloc(worker, i, *this, asms)); - - for (auto w : m_workers) - sl.push_child(&(w->limit())); - - sl.push_child(&(m_param_generator->limit())); + if (m_should_run_parallel) { + SASSERT(num_threads > 1); + for (unsigned i = 0; i < num_threads; ++i) + m_workers.push_back(alloc(worker, i, *this, asms)); + + for (auto w : m_workers) + sl.push_child(&(w->limit())); + } + + if (m_should_tune_params) { + m_param_generator = alloc(param_generator, *this); + sl.push_child(&(m_param_generator->limit())); + } + + std::string tuned = pp.tunable_params(); + if (!tuned.empty()) { + auto trim = [](std::string &s) { + s.erase(0, s.find_first_not_of(" \t\n\r")); + s.erase(s.find_last_not_of(" \t\n\r") + 1); + }; + + std::stringstream ss(tuned); + std::string kv; + + while (std::getline(ss, kv, ',')) { + size_t eq = kv.find('='); + if (eq == std::string::npos) + continue; + + std::string key = kv.substr(0, eq); + std::string val = kv.substr(eq + 1); + trim(key); + trim(val); + + if (val == "true" || val == "1") { + ctx.m_params.set_bool(symbol(key.c_str()), true); + } + else if (val == "false" || val == "0") { + ctx.m_params.set_bool(symbol(key.c_str()), false); + } + else if (std::all_of(val.begin(), val.end(), ::isdigit)) { + ctx.m_params.set_uint(symbol(key.c_str()), + static_cast(std::stoul(val))); + } else { + IF_VERBOSE(1, + verbose_stream() << "Ignoring invalid parameter override: " << kv << "\n";); + } + } + + IF_VERBOSE(1, + verbose_stream() << "Applied parameter overrides:\n"; + ctx.m_params.display(verbose_stream()); + ); + } // Launch threads - vector threads(num_threads + 1); 
// +1 for param generator - for (unsigned i = 0; i < num_threads; ++i) { - threads[i] = std::thread([&, i]() { m_workers[i]->run(); }); + // threads must live beyond the branch scope so we declare them here. + vector<std::thread> threads; + if (m_should_run_parallel) { + threads.resize(m_should_tune_params ? num_threads + 1 : num_threads); // +1 for param generator + for (unsigned i = 0; i < num_threads; ++i) { + threads[i] = std::thread([&, i]() { m_workers[i]->run(); }); + } + // the final thread runs the parameter generator + if (m_should_tune_params) { + threads[num_threads] = std::thread([&]() { m_param_generator->protocol_iteration(); }); + } + } else { // just do param tuning (if requested) + if (m_should_tune_params) { + threads.resize(1); + threads[0] = std::thread([&]() { m_param_generator->protocol_iteration(); }); + } } - // the final thread runs the parameter generator - threads[num_threads] = std::thread([&]() { m_param_generator->protocol_iteration(); }); // Wait for all threads to finish for (auto &th : threads) diff --git a/src/smt/smt_parallel.h b/src/smt/smt_parallel.h index 9d7834a25..9d3c92195 100644 --- a/src/smt/smt_parallel.h +++ b/src/smt/smt_parallel.h @@ -36,6 +36,8 @@ namespace smt { class parallel { context& ctx; unsigned num_threads; + bool m_should_tune_params; + bool m_should_run_parallel; struct shared_clause { unsigned source_worker_id; @@ -72,8 +74,8 @@ obj_hashtable<expr> shared_clause_set; // for duplicate filtering on per-thread clause expressions void cancel_background_threads() { - cancel_workers(); - cancel_param_generator(); + if (p.m_should_run_parallel) cancel_workers(); + if (p.m_should_tune_params) cancel_param_generator(); } // called from batch manager to cancel other workers if we've reached a verdict @@ -146,6 +148,19 @@ params_ref apply_param_values(param_values const &pv); void init_param_state(); param_values mutate_param_state(); + void print_param_values(param_values const &pv) { + for (auto const
&kv : pv) { + std::visit([&](auto&& arg) { + using T = std::decay_t<decltype(arg)>; + if constexpr (!std::is_same_v<T, bool>) { + IF_VERBOSE(1, verbose_stream() << kv.first << "=" << arg.value << " "); + } else if constexpr (std::is_same_v<T, bool>) { + IF_VERBOSE(1, verbose_stream() << kv.first << "=" << (arg ? "true" : "false") << " "); + } + }, kv.second); + } + IF_VERBOSE(1, verbose_stream() << "\n"); + } public: param_generator(parallel &p); diff --git a/src/test/polynomial_factorization.cpp b/src/test/polynomial_factorization.cpp index a7aa9af84..5efab2cd9 100644 --- a/src/test/polynomial_factorization.cpp +++ b/src/test/polynomial_factorization.cpp @@ -207,7 +207,189 @@ void test_factorization_gcd() { VERIFY(nm.eq(gcd_result[1], mpz(1))); } +void test_factorization_large_multivariate_missing_factors() { + std::cout << "test_factorization_large_multivariate_missing_factors\n"; + + reslimit rl; + numeral_manager nm; + manager m(rl, nm); + + polynomial_ref x0(m); + polynomial_ref x1(m); + polynomial_ref x2(m); + x0 = m.mk_polynomial(m.mk_var()); + x1 = m.mk_polynomial(m.mk_var()); + x2 = m.mk_polynomial(m.mk_var()); + + struct term_t { + int coeff; + unsigned e0; + unsigned e1; + unsigned e2; + }; + + /* + - x2^8 - x1 x2^7 - x0 x2^7 + 48 x2^7 + 2 x1^2 x2^6 + x0 x1 x2^6 + 132 x1 x2^6 + 2 x0^2 x2^6 + 132 x0 x2^6 + - 144 x2^6 + 2 x1^3 x2^5 + 6 x0 x1^2 x2^5 + 180 x1^2 x2^5 + 6 x0^2 x1 x2^5 + 432 x0 x1 x2^5 - + 864 x1 x2^5 + 2 x0^3 x2^5 + 180 x0^2 x2^5 - 864 x0 x2^5 - x1^4 x2^4 + 2 x0 x1^3 x2^4 + + 156 x1^3 x2^4 + 3 x0^2 x1^2 x2^4 + 684 x0 x1^2 x2^4 - 1620 x1^2 x2^4 + 2 x0^3 x1 x2^4 + 684 x0^2 x1 x2^4 - + 4536 x0 x1 x2^4 - x0^4 x2^4 + 156 x0^3 x2^4 - 1620 x0^2 x2^4 - x1^5 x2^3 - 5 x0 x1^4 x2^3 + 60 x1^4 x2^3 - + 7 x0^2 x1^3 x2^3 + 600 x0 x1^3 x2^3 - 900 x1^3 x2^3 - 7 x0^3 x1^2 x2^3 + 1080 x0^2 x1^2 x2^3 - 7452 x0 x1^2 x2^3 - + 5 x0^4 x1 x2^3 + 600 x0^3 x1 x2^3 - 7452 x0^2 x1 x2^3 - x0^5 x2^3 + 60 x0^4 x2^3 - 900 x0^3 x2^3 - 3 x0 x1^5 x2^2 - + 9 x0^2 x1^4 x2^2 + 216 x0 x1^4 x2^2 - 13 x0^3 x1^3 x2^2 +
828 x0^2 x1^3 x2^2 - 3780 x0 x1^3 x2^2 - 9 x0^4 x1^2 x2^2 + + 828 x0^3 x1^2 x2^2 - 11016 x0^2 x1^2 x2^2 - 3 x0^5 x1 x2^2 + 216 x0^4 x1 x2^2 - 3780 x0^3 x1 x2^2 - 3 x0^2 x1^5 x2 - + 7 x0^3 x1^4 x2 + 252 x0^2 x1^4 x2 - 7 x0^4 x1^3 x2 + 480 x0^3 x1^3 x2 - 5184 x0^2 x1^3 x2 - 3 x0^5 x1^2 x2 + + 252 x0^4 x1^2 x2 - 5184 x0^3 x1^2 x2 - x0^3 x1^5 - 2 x0^4 x1^4 + 96 x0^3 x1^4 - x0^5 x1^3 + 96 x0^4 x1^3 - 2304 x0^3 x1^3 + */ + static const term_t terms[] = { + { -1, 0u, 0u, 8u }, + { -1, 0u, 1u, 7u }, + { -1, 1u, 0u, 7u }, + { 48, 0u, 0u, 7u }, + { 2, 0u, 2u, 6u }, + { 1, 1u, 1u, 6u }, + { 132, 0u, 1u, 6u }, + { 2, 2u, 0u, 6u }, + { 132, 1u, 0u, 6u }, + { -144, 0u, 0u, 6u }, + { 2, 0u, 3u, 5u }, + { 6, 1u, 2u, 5u }, + { 180, 0u, 2u, 5u }, + { 6, 2u, 1u, 5u }, + { 432, 1u, 1u, 5u }, + { -864, 0u, 1u, 5u }, + { 2, 3u, 0u, 5u }, + { 180, 2u, 0u, 5u }, + { -864, 1u, 0u, 5u }, + { -1, 0u, 4u, 4u }, + { 2, 1u, 3u, 4u }, + { 156, 0u, 3u, 4u }, + { 3, 2u, 2u, 4u }, + { 684, 1u, 2u, 4u }, + { -1620, 0u, 2u, 4u }, + { 2, 3u, 1u, 4u }, + { 684, 2u, 1u, 4u }, + { -4536, 1u, 1u, 4u }, + { -1, 4u, 0u, 4u }, + { 156, 3u, 0u, 4u }, + { -1620, 2u, 0u, 4u }, + { -1, 0u, 5u, 3u }, + { -5, 1u, 4u, 3u }, + { 60, 0u, 4u, 3u }, + { -7, 2u, 3u, 3u }, + { 600, 1u, 3u, 3u }, + { -900, 0u, 3u, 3u }, + { -7, 3u, 2u, 3u }, + { 1080, 2u, 2u, 3u }, + { -7452, 1u, 2u, 3u }, + { -5, 4u, 1u, 3u }, + { 600, 3u, 1u, 3u }, + { -7452, 2u, 1u, 3u }, + { -1, 5u, 0u, 3u }, + { 60, 4u, 0u, 3u }, + { -900, 3u, 0u, 3u }, + { -3, 1u, 5u, 2u }, + { -9, 2u, 4u, 2u }, + { 216, 1u, 4u, 2u }, + { -13, 3u, 3u, 2u }, + { 828, 2u, 3u, 2u }, + { -3780, 1u, 3u, 2u }, + { -9, 4u, 2u, 2u }, + { 828, 3u, 2u, 2u }, + { -11016, 2u, 2u, 2u }, + { -3, 5u, 1u, 2u }, + { 216, 4u, 1u, 2u }, + { -3780, 3u, 1u, 2u }, + { -3, 2u, 5u, 1u }, + { -7, 3u, 4u, 1u }, + { 252, 2u, 4u, 1u }, + { -7, 4u, 3u, 1u }, + { 480, 3u, 3u, 1u }, + { -5184, 2u, 3u, 1u }, + { -3, 5u, 2u, 1u }, + { 252, 4u, 2u, 1u }, + { -5184, 3u, 2u, 1u }, + { -1, 3u, 5u, 0u 
}, + { -2, 4u, 4u, 0u }, + { 96, 3u, 4u, 0u }, + { -1, 5u, 3u, 0u }, + { 96, 4u, 3u, 0u }, + { -2304, 3u, 3u, 0u }, + }; + + polynomial_ref p(m); + p = m.mk_zero(); + + for (const auto & term : terms) { + polynomial_ref t(m); + t = m.mk_const(rational(term.coeff)); + if (term.e0 != 0) { + t = t * (x0 ^ term.e0); + } + if (term.e1 != 0) { + t = t * (x1 ^ term.e1); + } + if (term.e2 != 0) { + t = t * (x2 ^ term.e2); + } + p = p + t; + } + + factors fs(m); + factor(p, fs); + VERIFY(fs.distinct_factors() == 2); // indeed there are 3 factors, that is demonstrated by the loop + for (unsigned i = 0; i < fs.distinct_factors(); i++) { + polynomial_ref f(m); + f = fs[i]; + if (degree(f, x1)<= 1) continue; + factors fs0(m); + factor(f, fs0); + VERIFY(fs0.distinct_factors() >= 2); + } + + polynomial_ref reconstructed(m); + fs.multiply(reconstructed); + VERIFY(eq(reconstructed, p)); +} + +void test_factorization_multivariate_missing_factors() { + std::cout << "test_factorization_multivariate_missing_factors\n"; + + reslimit rl; + numeral_manager nm; + manager m(rl, nm); + + polynomial_ref x0(m); + polynomial_ref x1(m); + x0 = m.mk_polynomial(m.mk_var()); + x1 = m.mk_polynomial(m.mk_var()); + + polynomial_ref p(m); + p = (x0 + x1) * (x0 + (2 * x1)) * (x0 + (3 * x1)); + + factors fs(m); + factor(p, fs); + + // Multivariate factorization stops after returning the whole polynomial. + VERIFY(fs.distinct_factors() == 1); + VERIFY(m.degree(fs[0], 0) == 3); + + factors fs_refined(m); + polynomial_ref residual = fs[0]; + factor(residual, fs_refined); + + // A second attempt still fails to expose the linear factors. 
+ VERIFY(fs_refined.distinct_factors() == 1); // actually we need 3 factors + VERIFY(m.degree(fs_refined[0], 0) == 3); // actually we need degree 1 + + polynomial_ref reconstructed(m); + fs.multiply(reconstructed); + VERIFY(eq(reconstructed, p)); +} + void test_polynomial_factorization() { + test_factorization_large_multivariate_missing_factors(); + test_factorization_multivariate_missing_factors(); test_factorization_basic(); test_factorization_irreducible(); test_factorization_cubic(); @@ -221,4 +403,4 @@ void test_polynomial_factorization() { void tst_polynomial_factorization() { polynomial::test_polynomial_factorization(); -} \ No newline at end of file +}