diff --git a/.github/workflows/android-build.yml b/.github/workflows/android-build.yml index dcc40db0e..c2ea7c860 100644 --- a/.github/workflows/android-build.yml +++ b/.github/workflows/android-build.yml @@ -32,7 +32,7 @@ jobs: tar -cvf z3-build-${{ matrix.android-abi }}.tar *.jar *.so - name: Archive production artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: android-build-${{ matrix.android-abi }} path: build/z3-build-${{ matrix.android-abi }}.tar diff --git a/.github/workflows/ask.lock.yml b/.github/workflows/ask.lock.yml index c4425a643..19f9a99f2 100644 --- a/.github/workflows/ask.lock.yml +++ b/.github/workflows/ask.lock.yml @@ -1223,7 +1223,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw_info.json path: /tmp/aw_info.json @@ -1329,7 +1329,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -2277,7 +2277,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2814,7 +2814,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: question-answering-researcher.log path: /tmp/question-answering-researcher.log diff --git a/.github/workflows/ci-doctor.lock.yml b/.github/workflows/ci-doctor.lock.yml index c75fd661c..903da1c30 100644 --- a/.github/workflows/ci-doctor.lock.yml +++ b/.github/workflows/ci-doctor.lock.yml @@ -808,7 +808,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: 
name: aw_info.json path: /tmp/aw_info.json @@ -911,7 +911,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -1859,7 +1859,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2396,7 +2396,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: ci-failure-doctor.log path: /tmp/ci-failure-doctor.log diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 2aeb7d286..2c02dabf2 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -89,13 +89,13 @@ jobs: id: date run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@v5 with: name: coverage-${{steps.date.outputs.date}} path: ${{github.workspace}}/coverage.html retention-days: 4 - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@v5 with: name: coverage-details-${{steps.date.outputs.date}} path: ${{env.COV_DETAILS_PATH}} diff --git a/.github/workflows/daily-backlog-burner.lock.yml b/.github/workflows/daily-backlog-burner.lock.yml index 355ca9a78..5dfd11104 100644 --- a/.github/workflows/daily-backlog-burner.lock.yml +++ b/.github/workflows/daily-backlog-burner.lock.yml @@ -747,7 +747,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw_info.json path: /tmp/aw_info.json @@ -856,7 +856,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: 
actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -1804,7 +1804,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2341,7 +2341,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: daily-backlog-burner.log path: /tmp/daily-backlog-burner.log @@ -2435,7 +2435,7 @@ jobs: fi - name: Upload git patch if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw.patch path: /tmp/aw.patch @@ -2946,7 +2946,7 @@ jobs: steps: - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: aw.patch path: /tmp/ diff --git a/.github/workflows/daily-perf-improver.lock.yml b/.github/workflows/daily-perf-improver.lock.yml index 41448b626..266ef1b2e 100644 --- a/.github/workflows/daily-perf-improver.lock.yml +++ b/.github/workflows/daily-perf-improver.lock.yml @@ -822,7 +822,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw_info.json path: /tmp/aw_info.json @@ -931,7 +931,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -1879,7 +1879,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2416,7 +2416,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: 
actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: daily-perf-improver.log path: /tmp/daily-perf-improver.log @@ -2510,7 +2510,7 @@ jobs: fi - name: Upload git patch if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw.patch path: /tmp/aw.patch @@ -3021,7 +3021,7 @@ jobs: steps: - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: aw.patch path: /tmp/ diff --git a/.github/workflows/daily-test-improver.lock.yml b/.github/workflows/daily-test-improver.lock.yml index e001ab7df..8c7acc85d 100644 --- a/.github/workflows/daily-test-improver.lock.yml +++ b/.github/workflows/daily-test-improver.lock.yml @@ -797,7 +797,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw_info.json path: /tmp/aw_info.json @@ -906,7 +906,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -1854,7 +1854,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2391,7 +2391,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: daily-test-coverage-improver.log path: /tmp/daily-test-coverage-improver.log @@ -2485,7 +2485,7 @@ jobs: fi - name: Upload git patch if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw.patch path: /tmp/aw.patch @@ -2996,7 +2996,7 @@ jobs: steps: - name: Download patch artifact continue-on-error: true - uses: 
actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: aw.patch path: /tmp/ diff --git a/.github/workflows/pr-fix.lock.yml b/.github/workflows/pr-fix.lock.yml index 87e8b10c9..2e2679e64 100644 --- a/.github/workflows/pr-fix.lock.yml +++ b/.github/workflows/pr-fix.lock.yml @@ -1251,7 +1251,7 @@ jobs: .write(); - name: Upload agentic run info if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw_info.json path: /tmp/aw_info.json @@ -1360,7 +1360,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY - name: Upload agentic output file if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: safe_output.jsonl path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} @@ -2308,7 +2308,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GITHUB_AW_AGENT_OUTPUT - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: agent_output.json path: ${{ env.GITHUB_AW_AGENT_OUTPUT }} @@ -2845,7 +2845,7 @@ jobs: main(); - name: Upload agent logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: pr-fix.log path: /tmp/pr-fix.log @@ -2939,7 +2939,7 @@ jobs: fi - name: Upload git patch if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: aw.patch path: /tmp/aw.patch @@ -3371,7 +3371,7 @@ jobs: steps: - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: aw.patch path: /tmp/ diff --git a/MODULE.bazel b/MODULE.bazel index 48848d27e..985a66b8e 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -1,6 +1,6 @@ module( name = "z3", - version = "4.15.4", # TODO: Read from VERSION.txt - currently manual sync required + version = "4.15.5", # TODO: Read from VERSION.txt - currently manual sync required bazel_compatibility = [">=7.0.0"], ) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 
78c5cddbf..1efabaea6 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -7,6 +7,18 @@ Version 4.next - CDCL core for SMT queries. It extends the SAT engine with theory solver plugins. - add global incremental pre-processing for the legacy core. +Version 4.15.4 +============== +- Add methods to create polymorphic datatype constructors over the API. The prior method was that users had to manage + parametricity using their own generation of instances. The updated API allows users to work with polymorphic datatype declarations + directly. +- MSVC build by default respects security flags, https://github.com/Z3Prover/z3/pull/7988 +- Using a new algorithm for smt.threads=k, k > 1 based on a shared search tree. Thanks to Ilana Shapiro. +- Thanks for several pull requests improving usability, including + - https://github.com/Z3Prover/z3/pull/7955 + - https://github.com/Z3Prover/z3/pull/7995 + - https://github.com/Z3Prover/z3/pull/7947 + Version 4.15.3 ============== - Add UserPropagator callback option for quantifier instantiations. 
It allows the user propagator to diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 0bf2aef61..520d1d172 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -49,7 +49,8 @@ jobs: timeoutInMinutes: 90 pool: vmImage: "ubuntu-latest" - container: "quay.io/pypa/manylinux2014_x86_64:latest" + container: "quay.io/pypa/manylinux_2_34_x86_64:latest" + condition: eq(1,1) steps: - script: "/opt/python/cp38-cp38/bin/python -m venv $PWD/env" - script: 'echo "##vso[task.prependpath]$PWD/env/bin"' @@ -66,7 +67,6 @@ jobs: pool: vmImage: "ubuntu-latest" container: "quay.io/pypa/manylinux2014_x86_64:latest" - condition: eq(0,1) steps: - script: curl -L -o /tmp/arm-toolchain.tar.xz 'https://developer.arm.com/-/media/Files/downloads/gnu/11.2-2022.02/binrel/gcc-arm-11.2-2022.02-x86_64-aarch64-none-linux-gnu.tar.xz?rev=33c6e30e5ac64e6dba8f0431f2c35f1b&hash=9918A05BF47621B632C7A5C8D2BB438FB80A4480' - script: mkdir -p /tmp/arm-toolchain/ diff --git a/examples/c++/example.cpp b/examples/c++/example.cpp index c3902dfff..2bb5510e4 100644 --- a/examples/c++/example.cpp +++ b/examples/c++/example.cpp @@ -1024,14 +1024,17 @@ void polymorphic_datatype_example() { symbol is_pair_name = ctx.str_symbol("is-pair"); symbol first_name = ctx.str_symbol("first"); symbol second_name = ctx.str_symbol("second"); - + symbol field_names[2] = {first_name, second_name}; - sort field_sorts[2] = {alpha, beta}; // Use type variables + sort _field_sorts[2] = {alpha, beta}; + sort_vector field_sorts(ctx); + field_sorts.push_back(alpha); // Use type variables + field_sorts.push_back(beta); // Use type variables constructors cs(ctx); - cs.add(mk_pair_name, is_pair_name, 2, field_names, field_sorts); - sort pair = ctx.datatype(pair_name, cs); - + cs.add(mk_pair_name, is_pair_name, 2, field_names, _field_sorts); + sort pair = ctx.datatype(pair_name, field_sorts, cs); + std::cout << "Created parametric datatype: " << pair << "\n"; // Instantiate Pair with concrete types: (Pair Int Real) diff --git 
a/scripts/VERSION.txt b/scripts/VERSION.txt index 6baf7570c..79c398614 100644 --- a/scripts/VERSION.txt +++ b/scripts/VERSION.txt @@ -1 +1 @@ -4.15.4.0 +4.15.5.0 diff --git a/scripts/nightly.yaml b/scripts/nightly.yaml index 6618a301c..a86e6536d 100644 --- a/scripts/nightly.yaml +++ b/scripts/nightly.yaml @@ -2,7 +2,7 @@ variables: # Version components read from VERSION.txt (updated manually when VERSION.txt changes) Major: '4' Minor: '15' - Patch: '4' + Patch: '5' ReleaseVersion: $(Major).$(Minor).$(Patch) NightlyVersion: $(Major).$(Minor).$(Patch).$(Build.BuildId) # TODO: Auto-read from VERSION.txt when Azure DevOps supports it better @@ -365,17 +365,17 @@ stages: inputs: artifactName: 'WindowsBuild-x86' targetPath: $(Agent.TempDirectory) - - task: DownloadPipelineArtifact@2 - displayName: 'Download ManyLinux Build' - inputs: - artifactName: 'ManyLinuxPythonBuildAMD64' - targetPath: $(Agent.TempDirectory) +# - task: DownloadPipelineArtifact@2 +# displayName: 'Download ManyLinux Build' +# inputs: +# artifactName: 'ManyLinuxPythonBuildAMD64' +# targetPath: $(Agent.TempDirectory) - task: DownloadPipelineArtifact@2 displayName: 'Download ManyLinux Arm64 Build' inputs: artifactName: 'ManyLinuxPythonBuildArm64' targetPath: $(Agent.TempDirectory) - - script: cd $(Agent.TempDirectory); mkdir osx-x64-bin; cd osx-x64-bin; unzip ../*x64-osx*.zip +# - script: cd $(Agent.TempDirectory); mkdir osx-x64-bin; cd osx-x64-bin; unzip ../*x64-osx*.zip - script: cd $(Agent.TempDirectory); mkdir osx-arm64-bin; cd osx-arm64-bin; unzip ../*arm64-osx*.zip # - script: cd $(Agent.TempDirectory); mkdir musl-bin; cd musl-bin; unzip ../*-linux.zip - script: cd $(Agent.TempDirectory); mkdir win32-bin; cd win32-bin; unzip ../*x86-win*.zip diff --git a/scripts/release.yml b/scripts/release.yml index 506295525..5c88c89ae 100644 --- a/scripts/release.yml +++ b/scripts/release.yml @@ -6,7 +6,7 @@ trigger: none variables: - ReleaseVersion: '4.15.4' # TODO: Auto-read from VERSION.txt when Azure DevOps 
supports it better + ReleaseVersion: '4.15.5' # TODO: Auto-read from VERSION.txt when Azure DevOps supports it better stages: @@ -476,7 +476,7 @@ stages: - job: NuGetPublish - condition: eq(1,1) + condition: eq(0,1) displayName: "Publish to NuGet.org" steps: - task: DownloadPipelineArtifact@2 diff --git a/src/api/api_seq.cpp b/src/api/api_seq.cpp index 2b87ef290..cf199af41 100644 --- a/src/api/api_seq.cpp +++ b/src/api/api_seq.cpp @@ -293,6 +293,9 @@ extern "C" { MK_TERNARY(Z3_mk_seq_extract, mk_c(c)->get_seq_fid(), OP_SEQ_EXTRACT, SKIP); MK_TERNARY(Z3_mk_seq_replace, mk_c(c)->get_seq_fid(), OP_SEQ_REPLACE, SKIP); + MK_TERNARY(Z3_mk_seq_replace_all, mk_c(c)->get_seq_fid(), OP_SEQ_REPLACE_ALL, SKIP); + MK_TERNARY(Z3_mk_seq_replace_re, mk_c(c)->get_seq_fid(), OP_SEQ_REPLACE_RE, SKIP); + MK_TERNARY(Z3_mk_seq_replace_re_all, mk_c(c)->get_seq_fid(), OP_SEQ_REPLACE_RE_ALL, SKIP); MK_BINARY(Z3_mk_seq_at, mk_c(c)->get_seq_fid(), OP_SEQ_AT, SKIP); MK_BINARY(Z3_mk_seq_nth, mk_c(c)->get_seq_fid(), OP_SEQ_NTH, SKIP); MK_UNARY(Z3_mk_seq_length, mk_c(c)->get_seq_fid(), OP_SEQ_LENGTH, SKIP); diff --git a/src/api/c++/z3++.h b/src/api/c++/z3++.h index 71f3ff79b..2acb010cb 100644 --- a/src/api/c++/z3++.h +++ b/src/api/c++/z3++.h @@ -327,6 +327,15 @@ namespace z3 { */ sort datatype(symbol const& name, constructors const& cs); + /** + \brief Create a parametric recursive datatype. + \c name is the name of the recursive datatype + \c params - the sort parameters of the datatype + \c cs - the \c n constructors used to define the datatype + References to the datatype and mutually recursive datatypes can be created using \ref datatype_sort. + */ + sort datatype(symbol const &name, sort_vector const ¶ms, constructors const &cs); + /** \brief Create a set of mutually recursive datatypes. 
\c n - number of recursive datatypes @@ -3616,6 +3625,16 @@ namespace z3 { return sort(*this, s); } + inline sort context::datatype(symbol const &name, sort_vector const& params, constructors const &cs) { + array _params(params); + array _cs(cs.size()); + for (unsigned i = 0; i < cs.size(); ++i) + _cs[i] = cs[i]; + Z3_sort s = Z3_mk_polymorphic_datatype(*this, name, _params.size(), _params.ptr(), cs.size(), _cs.ptr()); + check_error(); + return sort(*this, s); + } + inline sort_vector context::datatypes( unsigned n, symbol const* names, constructor_list *const* cons) { diff --git a/src/api/java/Context.java b/src/api/java/Context.java index 691ecd737..9a8218537 100644 --- a/src/api/java/Context.java +++ b/src/api/java/Context.java @@ -2226,6 +2226,15 @@ public class Context implements AutoCloseable { return (IntExpr)Expr.create(this, Native.mkSeqIndex(nCtx(), s.getNativeObject(), substr.getNativeObject(), offset.getNativeObject())); } + /** + * Extract the last index of sub-string. + */ + public final IntExpr mkLastIndexOf(Expr> s, Expr> substr) + { + checkContextMatch(s, substr); + return (IntExpr)Expr.create(this, Native.mkSeqLastIndex(nCtx(), s.getNativeObject(), substr.getNativeObject())); + } + /** * Replace the first occurrence of src by dst in s. */ @@ -2235,6 +2244,33 @@ public class Context implements AutoCloseable { return (SeqExpr) Expr.create(this, Native.mkSeqReplace(nCtx(), s.getNativeObject(), src.getNativeObject(), dst.getNativeObject())); } + /** + * Replace all occurrences of src by dst in s. + */ + public final SeqExpr mkReplaceAll(Expr> s, Expr> src, Expr> dst) + { + checkContextMatch(s, src, dst); + return (SeqExpr) Expr.create(this, Native.mkSeqReplaceAll(nCtx(), s.getNativeObject(), src.getNativeObject(), dst.getNativeObject())); + } + + /** + * Replace the first occurrence of regular expression re with dst in s. 
+ */ + public final SeqExpr mkReplaceRe(Expr> s, ReExpr> re, Expr> dst) + { + checkContextMatch(s, re, dst); + return (SeqExpr) Expr.create(this, Native.mkSeqReplaceRe(nCtx(), s.getNativeObject(), re.getNativeObject(), dst.getNativeObject())); + } + + /** + * Replace all occurrences of regular expression re with dst in s. + */ + public final SeqExpr mkReplaceReAll(Expr> s, ReExpr> re, Expr> dst) + { + checkContextMatch(s, re, dst); + return (SeqExpr) Expr.create(this, Native.mkSeqReplaceReAll(nCtx(), s.getNativeObject(), re.getNativeObject(), dst.getNativeObject())); + } + /** * Convert a regular expression that accepts sequence s. */ diff --git a/src/api/java/Sort.java b/src/api/java/Sort.java index f612b9031..4910338f3 100644 --- a/src/api/java/Sort.java +++ b/src/api/java/Sort.java @@ -144,6 +144,8 @@ public class Sort extends AST return new SeqSort<>(ctx, obj); case Z3_RE_SORT: return new ReSort<>(ctx, obj); + case Z3_CHAR_SORT: + return new CharSort(ctx, obj); default: throw new Z3Exception("Unknown sort kind"); } diff --git a/src/api/z3_api.h b/src/api/z3_api.h index baa2fa34c..c5d3933ca 100644 --- a/src/api/z3_api.h +++ b/src/api/z3_api.h @@ -3800,6 +3800,27 @@ extern "C" { */ Z3_ast Z3_API Z3_mk_seq_replace(Z3_context c, Z3_ast s, Z3_ast src, Z3_ast dst); + /** + \brief Replace all occurrences of \c src with \c dst in \c s. + + def_API('Z3_mk_seq_replace_all', AST ,(_in(CONTEXT), _in(AST), _in(AST), _in(AST))) + */ + Z3_ast Z3_API Z3_mk_seq_replace_all(Z3_context c, Z3_ast s, Z3_ast src, Z3_ast dst); + + /** + \brief Replace the first occurrence of regular expression \c re with \c dst in \c s. + + def_API('Z3_mk_seq_replace_re', AST ,(_in(CONTEXT), _in(AST), _in(AST), _in(AST))) + */ + Z3_ast Z3_API Z3_mk_seq_replace_re(Z3_context c, Z3_ast s, Z3_ast re, Z3_ast dst); + + /** + \brief Replace all occurrences of regular expression \c re with \c dst in \c s. 
+ + def_API('Z3_mk_seq_replace_re_all', AST ,(_in(CONTEXT), _in(AST), _in(AST), _in(AST))) + */ + Z3_ast Z3_API Z3_mk_seq_replace_re_all(Z3_context c, Z3_ast s, Z3_ast re, Z3_ast dst); + /** \brief Retrieve from \c s the unit sequence positioned at position \c index. The sequence is empty if the index is out of bounds. diff --git a/src/ast/datatype_decl_plugin.cpp b/src/ast/datatype_decl_plugin.cpp index 5bb918c5f..d0c74bd50 100644 --- a/src/ast/datatype_decl_plugin.cpp +++ b/src/ast/datatype_decl_plugin.cpp @@ -300,18 +300,12 @@ namespace datatype { TRACE(datatype, tout << "expected sort parameter at position " << i << " got: " << s << "\n";); throw invalid_datatype(); } - // Allow type variables as parameters for polymorphic datatypes - sort* param_sort = to_sort(s.get_ast()); - if (!m_manager->is_type_var(param_sort) && param_sort->get_family_id() == null_family_id) { - // Type variables and concrete sorts are allowed, but not other uninterpreted sorts - // Actually, all sorts should be allowed including uninterpreted ones - } } sort* s = m_manager->mk_sort(name.get_symbol(), sort_info(m_family_id, k, num_parameters, parameters, true)); def* d = nullptr; - if (m_defs.find(s->get_name(), d) && d->sort_size()) { + if (m_defs.find(s->get_name(), d) && d->sort_size() && d->params().size() == num_parameters - 1) { obj_map S; for (unsigned i = 0; i + 1 < num_parameters; ++i) { sort* r = to_sort(parameters[i + 1].get_ast()); diff --git a/src/ast/euf/ho_matcher.h b/src/ast/euf/ho_matcher.h index 007bdea2c..65477078c 100644 --- a/src/ast/euf/ho_matcher.h +++ b/src/ast/euf/ho_matcher.h @@ -100,14 +100,14 @@ namespace euf { class match_goals { protected: - ast_manager &m; ho_matcher& ho; + ast_manager &m; match_goal* m_expensive = nullptr, *m_cheap = nullptr; match_goal* pop(match_goal*& q); public: - match_goals(ho_matcher& em, ast_manager &m) : m(m), ho(em) {} + match_goals(ho_matcher& em, ast_manager& m) : ho(em), m(m) {} bool empty() const { return m_cheap == nullptr 
&& m_expensive == nullptr; } void reset() { m_cheap = m_expensive = nullptr; } void push(unsigned level, unsigned offset, expr_ref const& pat, expr_ref const& t); @@ -158,7 +158,6 @@ namespace euf { }; class unitary_patterns { - ast_manager& m; array_util a; vector m_patterns; vector> m_is_unitary; @@ -181,7 +180,7 @@ namespace euf { } public: - unitary_patterns(ast_manager& m) : m(m), a(m) {} + unitary_patterns(ast_manager& m) : a(m) {} bool is_unitary(unsigned offset, expr* p) const { return find(offset, p) == l_true; diff --git a/src/nlsat/nlsat_solver.cpp b/src/nlsat/nlsat_solver.cpp index bad981011..084e3a479 100644 --- a/src/nlsat/nlsat_solver.cpp +++ b/src/nlsat/nlsat_solver.cpp @@ -1116,7 +1116,7 @@ namespace nlsat { void log_lemma(std::ostream& out, unsigned n, literal const* cls, bool is_valid) { ++m_lemma_count; - out << "(set-logic NRA)\n"; + out << "(set-logic ALL)\n"; if (is_valid) { display_smt2_bool_decls(out); display_smt2_arith_decls(out); diff --git a/src/test/polynomial_factorization.cpp b/src/test/polynomial_factorization.cpp index a7aa9af84..5efab2cd9 100644 --- a/src/test/polynomial_factorization.cpp +++ b/src/test/polynomial_factorization.cpp @@ -207,7 +207,189 @@ void test_factorization_gcd() { VERIFY(nm.eq(gcd_result[1], mpz(1))); } +void test_factorization_large_multivariate_missing_factors() { + std::cout << "test_factorization_large_multivariate_missing_factors\n"; + + reslimit rl; + numeral_manager nm; + manager m(rl, nm); + + polynomial_ref x0(m); + polynomial_ref x1(m); + polynomial_ref x2(m); + x0 = m.mk_polynomial(m.mk_var()); + x1 = m.mk_polynomial(m.mk_var()); + x2 = m.mk_polynomial(m.mk_var()); + + struct term_t { + int coeff; + unsigned e0; + unsigned e1; + unsigned e2; + }; + + /* + - x2^8 - x1 x2^7 - x0 x2^7 + 48 x2^7 + 2 x1^2 x2^6 + x0 x1 x2^6 + 132 x1 x2^6 + 2 x0^2 x2^6 + 132 x0 x2^6 + - 144 x2^6 + 2 x1^3 x2^5 + 6 x0 x1^2 x2^5 + 180 x1^2 x2^5 + 6 x0^2 x1 x2^5 + 432 x0 x1 x2^5 - + 864 x1 x2^5 + 2 x0^3 x2^5 + 180 x0^2 x2^5 - 
864 x0 x2^5 - x1^4 x2^4 + 2 x0 x1^3 x2^4 + + 156 x1^3 x2^4 + 3 x0^2 x1^2 x2^4 + 684 x0 x1^2 x2^4 - 1620 x1^2 x2^4 + 2 x0^3 x1 x2^4 + 684 x0^2 x1 x2^4 - + 4536 x0 x1 x2^4 - x0^4 x2^4 + 156 x0^3 x2^4 - 1620 x0^2 x2^4 - x1^5 x2^3 - 5 x0 x1^4 x2^3 + 60 x1^4 x2^3 - + 7 x0^2 x1^3 x2^3 + 600 x0 x1^3 x2^3 - 900 x1^3 x2^3 - 7 x0^3 x1^2 x2^3 + 1080 x0^2 x1^2 x2^3 - 7452 x0 x1^2 x2^3 - + 5 x0^4 x1 x2^3 + 600 x0^3 x1 x2^3 - 7452 x0^2 x1 x2^3 - x0^5 x2^3 + 60 x0^4 x2^3 - 900 x0^3 x2^3 - 3 x0 x1^5 x2^2 - + 9 x0^2 x1^4 x2^2 + 216 x0 x1^4 x2^2 - 13 x0^3 x1^3 x2^2 + 828 x0^2 x1^3 x2^2 - 3780 x0 x1^3 x2^2 - 9 x0^4 x1^2 x2^2 + + 828 x0^3 x1^2 x2^2 - 11016 x0^2 x1^2 x2^2 - 3 x0^5 x1 x2^2 + 216 x0^4 x1 x2^2 - 3780 x0^3 x1 x2^2 - 3 x0^2 x1^5 x2 - + 7 x0^3 x1^4 x2 + 252 x0^2 x1^4 x2 - 7 x0^4 x1^3 x2 + 480 x0^3 x1^3 x2 - 5184 x0^2 x1^3 x2 - 3 x0^5 x1^2 x2 + + 252 x0^4 x1^2 x2 - 5184 x0^3 x1^2 x2 - x0^3 x1^5 - 2 x0^4 x1^4 + 96 x0^3 x1^4 - x0^5 x1^3 + 96 x0^4 x1^3 - 2304 x0^3 x1^3 + */ + static const term_t terms[] = { + { -1, 0u, 0u, 8u }, + { -1, 0u, 1u, 7u }, + { -1, 1u, 0u, 7u }, + { 48, 0u, 0u, 7u }, + { 2, 0u, 2u, 6u }, + { 1, 1u, 1u, 6u }, + { 132, 0u, 1u, 6u }, + { 2, 2u, 0u, 6u }, + { 132, 1u, 0u, 6u }, + { -144, 0u, 0u, 6u }, + { 2, 0u, 3u, 5u }, + { 6, 1u, 2u, 5u }, + { 180, 0u, 2u, 5u }, + { 6, 2u, 1u, 5u }, + { 432, 1u, 1u, 5u }, + { -864, 0u, 1u, 5u }, + { 2, 3u, 0u, 5u }, + { 180, 2u, 0u, 5u }, + { -864, 1u, 0u, 5u }, + { -1, 0u, 4u, 4u }, + { 2, 1u, 3u, 4u }, + { 156, 0u, 3u, 4u }, + { 3, 2u, 2u, 4u }, + { 684, 1u, 2u, 4u }, + { -1620, 0u, 2u, 4u }, + { 2, 3u, 1u, 4u }, + { 684, 2u, 1u, 4u }, + { -4536, 1u, 1u, 4u }, + { -1, 4u, 0u, 4u }, + { 156, 3u, 0u, 4u }, + { -1620, 2u, 0u, 4u }, + { -1, 0u, 5u, 3u }, + { -5, 1u, 4u, 3u }, + { 60, 0u, 4u, 3u }, + { -7, 2u, 3u, 3u }, + { 600, 1u, 3u, 3u }, + { -900, 0u, 3u, 3u }, + { -7, 3u, 2u, 3u }, + { 1080, 2u, 2u, 3u }, + { -7452, 1u, 2u, 3u }, + { -5, 4u, 1u, 3u }, + { 600, 3u, 1u, 3u }, + { -7452, 2u, 1u, 3u }, + { -1, 5u, 0u, 3u 
}, + { 60, 4u, 0u, 3u }, + { -900, 3u, 0u, 3u }, + { -3, 1u, 5u, 2u }, + { -9, 2u, 4u, 2u }, + { 216, 1u, 4u, 2u }, + { -13, 3u, 3u, 2u }, + { 828, 2u, 3u, 2u }, + { -3780, 1u, 3u, 2u }, + { -9, 4u, 2u, 2u }, + { 828, 3u, 2u, 2u }, + { -11016, 2u, 2u, 2u }, + { -3, 5u, 1u, 2u }, + { 216, 4u, 1u, 2u }, + { -3780, 3u, 1u, 2u }, + { -3, 2u, 5u, 1u }, + { -7, 3u, 4u, 1u }, + { 252, 2u, 4u, 1u }, + { -7, 4u, 3u, 1u }, + { 480, 3u, 3u, 1u }, + { -5184, 2u, 3u, 1u }, + { -3, 5u, 2u, 1u }, + { 252, 4u, 2u, 1u }, + { -5184, 3u, 2u, 1u }, + { -1, 3u, 5u, 0u }, + { -2, 4u, 4u, 0u }, + { 96, 3u, 4u, 0u }, + { -1, 5u, 3u, 0u }, + { 96, 4u, 3u, 0u }, + { -2304, 3u, 3u, 0u }, + }; + + polynomial_ref p(m); + p = m.mk_zero(); + + for (const auto & term : terms) { + polynomial_ref t(m); + t = m.mk_const(rational(term.coeff)); + if (term.e0 != 0) { + t = t * (x0 ^ term.e0); + } + if (term.e1 != 0) { + t = t * (x1 ^ term.e1); + } + if (term.e2 != 0) { + t = t * (x2 ^ term.e2); + } + p = p + t; + } + + factors fs(m); + factor(p, fs); + VERIFY(fs.distinct_factors() == 2); // indeed there are 3 factors, that is demonstrated by the loop + for (unsigned i = 0; i < fs.distinct_factors(); i++) { + polynomial_ref f(m); + f = fs[i]; + if (degree(f, x1)<= 1) continue; + factors fs0(m); + factor(f, fs0); + VERIFY(fs0.distinct_factors() >= 2); + } + + polynomial_ref reconstructed(m); + fs.multiply(reconstructed); + VERIFY(eq(reconstructed, p)); +} + +void test_factorization_multivariate_missing_factors() { + std::cout << "test_factorization_multivariate_missing_factors\n"; + + reslimit rl; + numeral_manager nm; + manager m(rl, nm); + + polynomial_ref x0(m); + polynomial_ref x1(m); + x0 = m.mk_polynomial(m.mk_var()); + x1 = m.mk_polynomial(m.mk_var()); + + polynomial_ref p(m); + p = (x0 + x1) * (x0 + (2 * x1)) * (x0 + (3 * x1)); + + factors fs(m); + factor(p, fs); + + // Multivariate factorization stops after returning the whole polynomial. 
+ VERIFY(fs.distinct_factors() == 1); + VERIFY(m.degree(fs[0], 0) == 3); + + factors fs_refined(m); + polynomial_ref residual = fs[0]; + factor(residual, fs_refined); + + // A second attempt still fails to expose the linear factors. + VERIFY(fs_refined.distinct_factors() == 1); // actually we need 3 factors + VERIFY(m.degree(fs_refined[0], 0) == 3); // actually we need degree 1 + + polynomial_ref reconstructed(m); + fs.multiply(reconstructed); + VERIFY(eq(reconstructed, p)); +} + void test_polynomial_factorization() { + test_factorization_large_multivariate_missing_factors(); + test_factorization_multivariate_missing_factors(); test_factorization_basic(); test_factorization_irreducible(); test_factorization_cubic(); @@ -221,4 +403,4 @@ void test_polynomial_factorization() { void tst_polynomial_factorization() { polynomial::test_polynomial_factorization(); -} \ No newline at end of file +}