diff --git a/.github/benchmark_projects.yml b/.github/benchmark_projects.yml index ba0c598c25a..3503a439eb7 100644 --- a/.github/benchmark_projects.yml +++ b/.github/benchmark_projects.yml @@ -7,7 +7,7 @@ projects: num_runs: 5 compilation-timeout: 2.5 execution-timeout: 0.08 - compilation-memory-limit: 300 + compilation-memory-limit: 350 execution-memory-limit: 250 private-kernel-tail: repo: AztecProtocol/aztec-packages @@ -18,7 +18,7 @@ projects: compilation-timeout: 1.2 execution-timeout: 0.02 compilation-memory-limit: 250 - execution-memory-limit: 210 + execution-memory-limit: 230 private-kernel-reset: repo: AztecProtocol/aztec-packages ref: *AZ_COMMIT @@ -65,7 +65,7 @@ projects: cannot_execute: true num_runs: 1 timeout: 60 - compilation-timeout: 100 + compilation-timeout: 110 compilation-memory-limit: 8000 rollup-block-root: repo: AztecProtocol/aztec-packages diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml index c123def6ba3..663142091d3 100644 --- a/.github/workflows/docs-pr.yml +++ b/.github/workflows/docs-pr.yml @@ -33,7 +33,7 @@ jobs: // Check if any file is within the 'docs' folder const docsChanged = files.some(file => file.filename.startsWith('docs/')); return docsChanged; - + - name: Add label if not present if: steps.check-labels.outputs.result == 'true' uses: actions/github-script@v7.0.1 @@ -57,7 +57,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 - uses: Swatinem/rust-cache@v2 with: @@ -102,13 +102,13 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 - + - name: Download built docs uses: actions/download-artifact@v4 with: name: docs path: ./docs/build - + - name: Deploy to Netlify uses: nwtgck/actions-netlify@v2.1 with: diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 34216c22e01..007fd89b0ae 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -25,7 +25,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 with: targets: x86_64-unknown-linux-gnu components: clippy, rustfmt @@ -51,7 +51,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 with: targets: x86_64-unknown-linux-gnu components: clippy, rustfmt @@ -89,7 +89,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 - uses: Swatinem/rust-cache@v2 with: @@ -121,7 +121,7 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - + - name: Download nargo binary uses: ./.github/actions/download-nargo diff --git a/.github/workflows/publish-acvm.yml b/.github/workflows/publish-acvm.yml index 27d927a67d1..60e523777e8 100644 --- a/.github/workflows/publish-acvm.yml +++ b/.github/workflows/publish-acvm.yml @@ -18,7 +18,7 @@ jobs: ref: ${{ inputs.noir-ref }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 # These steps are in a specific order so crate dependencies are updated first - name: Publish acir_field diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index 76c6fce6d5e..8186213effd 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -22,9 +22,9 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ inputs.noir-ref }} - + - name: 
Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 - uses: Swatinem/rust-cache@v2 with: @@ -44,7 +44,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: noirc_abi_wasm - path: | + path: | ./tooling/noirc_abi_wasm/nodejs ./tooling/noirc_abi_wasm/web retention-days: 10 @@ -58,7 +58,7 @@ jobs: ref: ${{ inputs.noir-ref }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 - uses: Swatinem/rust-cache@v2 with: @@ -95,7 +95,7 @@ jobs: ref: ${{ inputs.noir-ref }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 - uses: Swatinem/rust-cache@v2 with: @@ -119,7 +119,7 @@ jobs: ./acvm-repo/acvm_js/nodejs ./acvm-repo/acvm_js/web retention-days: 3 - + publish-es-packages: runs-on: ubuntu-22.04 needs: [build-acvm_js, build-noirc_abi_wasm, build-noir_wasm] @@ -133,12 +133,12 @@ jobs: with: name: acvm-js path: acvm-repo/acvm_js - + - uses: actions/download-artifact@v4 with: name: noir_wasm path: compiler/wasm - + - uses: actions/download-artifact@v4 with: name: noirc_abi_wasm diff --git a/.github/workflows/publish-nargo.yml b/.github/workflows/publish-nargo.yml index 9501b979812..e18dac52ca4 100644 --- a/.github/workflows/publish-nargo.yml +++ b/.github/workflows/publish-nargo.yml @@ -38,15 +38,8 @@ jobs: with: ref: ${{ inputs.tag || env.GITHUB_REF }} - - name: Setup for Apple Silicon - if: matrix.target == 'aarch64-apple-darwin' - run: | - sudo xcode-select -s /Applications/Xcode_15.4.0.app/Contents/Developer/ - echo "SDKROOT=$(xcrun -sdk macosx$(sw_vers -productVersion) --show-sdk-path)" >> $GITHUB_ENV - echo "MACOSX_DEPLOYMENT_TARGET=$(xcrun -sdk macosx$(sw_vers -productVersion) --show-sdk-platform-version)" >> $GITHUB_ENV - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 with: targets: ${{ matrix.target }} @@ -61,7 +54,7 @@ jobs: cargo build --package nargo_cli --release --target ${{ matrix.target }} --no-default-features --features "${{ inputs.features }}" cargo build --package noir_profiler --release --target ${{ matrix.target }} --no-default-features --features "${{ inputs.features }}" cargo build --package noir_inspector --release --target ${{ matrix.target }} --no-default-features --features "${{ inputs.features }}" - + - name: Package artifacts run: | mkdir dist @@ -151,7 +144,7 @@ jobs: ref: ${{ inputs.tag || env.GITHUB_REF }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 with: targets: ${{ matrix.target }} @@ -245,4 +238,4 @@ jobs: overwrite: true tag: ${{ format('{0}-{1}', 'nightly', steps.date.outputs.date) }} - + diff --git a/.github/workflows/reports.yml b/.github/workflows/reports.yml index cdc2e2c33a3..d458f8998f0 100644 --- a/.github/workflows/reports.yml +++ b/.github/workflows/reports.yml @@ -32,7 +32,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 - uses: Swatinem/rust-cache@v2 with: @@ -305,7 +305,7 @@ jobs: repository: ${{ matrix.repo }} path: test-repo ref: ${{ matrix.ref }} - + - name: Fetch noir dependencies working-directory: ./test-repo/${{ matrix.path }} run: | @@ -341,7 +341,7 @@ jobs: path: ${{ steps.compilation_report.outputs.report_path }} retention-days: 3 overwrite: true - + - name: Generate execution report id: execution_report working-directory: ./test-repo/${{ matrix.path }} @@ -359,7 +359,7 @@ jobs: echo "report_name=$REPORT_NAME" >> 
$GITHUB_OUTPUT echo "report_path=$REPORT_PATH" >> $GITHUB_OUTPUT - + - name: Upload execution report if: ${{ !matrix.cannot_execute }} uses: actions/upload-artifact@v4 @@ -716,7 +716,7 @@ jobs: - upload_compilation_memory_report - upload_execution_report - upload_execution_memory_report - + steps: - name: Report overall success run: | diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml index a4f73a5898b..f88e64fa1a5 100644 --- a/.github/workflows/test-js-packages.yml +++ b/.github/workflows/test-js-packages.yml @@ -54,7 +54,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 - uses: Swatinem/rust-cache@v2 with: @@ -87,7 +87,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 - uses: Swatinem/rust-cache@v2 with: @@ -119,7 +119,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 - uses: Swatinem/rust-cache@v2 with: @@ -154,7 +154,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 - uses: Swatinem/rust-cache@v2 with: @@ -493,7 +493,7 @@ jobs: fail-fast: false matrix: include: ${{ fromJson( needs.critical-library-list.outputs.libraries )}} - + name: Check external repo - ${{ matrix.repo }}/${{ matrix.path }} steps: - name: Checkout diff --git a/.github/workflows/test-rust-workspace-msrv.yml b/.github/workflows/test-rust-workspace-msrv.yml index 38bc3cba153..8f061bcad64 100644 --- a/.github/workflows/test-rust-workspace-msrv.yml +++ b/.github/workflows/test-rust-workspace-msrv.yml @@ -3,7 +3,7 @@ name: Test (MSRV check) # TL;DR https://github.com/noir-lang/noir/issues/4384 # # This workflow acts to ensure that we can publish to crates.io, we need this extra check as libraries don't respect the Cargo.lock file committed in this repository. -# We must then always be able to build the workspace using the latest versions of all of our dependencies, so we explicitly update them and build in this workflow. +# We must then always be able to build the workspace using the latest versions of all of our dependencies, so we explicitly update them and build in this workflow. on: schedule: @@ -29,12 +29,12 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 with: targets: x86_64-unknown-linux-gnu # We force the ACVM crate and all of its dependencies to update their dependencies - # This ensures that we'll be able to build the crates when they're being published. + # This ensures that we'll be able to build the crates when they're being published. 
- name: Update Cargo.lock run: | cargo update --package acvm --aggressive @@ -53,7 +53,7 @@ jobs: - name: Build and archive tests run: cargo nextest archive --workspace --archive-file nextest-archive.tar.zst - + - name: Upload archive to workflow uses: actions/upload-artifact@v4 with: @@ -70,9 +70,9 @@ jobs: partition: [1, 2, 3, 4] steps: - uses: actions/checkout@v4 - + - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 with: targets: x86_64-unknown-linux-gnu @@ -80,7 +80,7 @@ jobs: uses: taiki-e/install-action@v2 with: tool: nextest@0.9.67 - + - name: Download archive uses: actions/download-artifact@v4 with: @@ -99,9 +99,9 @@ jobs: runs-on: ubuntu-22.04 # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. if: ${{ always() }} - needs: + needs: - run-tests - + steps: - name: Report overall success run: | @@ -113,7 +113,7 @@ jobs: env: # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') }} - + - name: Checkout if: ${{ failure() }} uses: actions/checkout@v4 diff --git a/.github/workflows/test-rust-workspace.yml b/.github/workflows/test-rust-workspace.yml index fe421361072..91809a98a26 100644 --- a/.github/workflows/test-rust-workspace.yml +++ b/.github/workflows/test-rust-workspace.yml @@ -23,7 +23,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 with: targets: x86_64-unknown-linux-gnu @@ -40,7 +40,7 @@ jobs: - name: Build and archive tests run: cargo nextest archive --workspace --archive-file nextest-archive.tar.zst - + - name: Upload archive to workflow uses: actions/upload-artifact@v4 with: @@ -57,9 +57,9 @@ jobs: partition: [1, 2, 3, 4] steps: - uses: actions/checkout@v4 - + - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.75.0 + uses: dtolnay/rust-toolchain@1.85.0 with: targets: x86_64-unknown-linux-gnu @@ -67,7 +67,7 @@ jobs: uses: taiki-e/install-action@v2 with: tool: nextest@0.9.67 - + - name: Download archive uses: actions/download-artifact@v4 with: @@ -86,9 +86,9 @@ jobs: runs-on: ubuntu-22.04 # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. 
if: ${{ always() }} - needs: + needs: - run-tests - + steps: - name: Report overall success run: | diff --git a/Cargo.lock b/Cargo.lock index 2962b688996..eec578098cd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -110,7 +110,7 @@ dependencies = [ "build-data", "console_error_panic_hook", "const-str", - "getrandom", + "getrandom 0.2.15", "gloo-utils", "js-sys", "pkg-config", @@ -150,7 +150,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom", + "getrandom 0.2.15", "once_cell", "version_check", "zerocopy", @@ -253,9 +253,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4" [[package]] name = "ark-bls12-381" @@ -386,7 +386,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" dependencies = [ "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -412,7 +412,7 @@ dependencies = [ "num-traits", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -499,7 +499,7 @@ checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -587,13 +587,13 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.85" +version = "0.1.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -643,9 +643,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +checksum = "c103cbbedac994e292597ab79342dbd5b306a362045095db54917d92a9fdfd92" [[package]] name = "binary-merge" @@ -721,9 +721,9 @@ dependencies = [ [[package]] name = "blake2b_simd" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780" +checksum = "06e903a20b159e944f91ec8499fe1e55651480c541ea0a584f5d967c49ad9d99" dependencies = [ "arrayref", "arrayvec", @@ -732,15 +732,16 @@ dependencies = [ [[package]] name = "blake3" -version = "1.5.5" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" +checksum = "1230237285e3e10cde447185e8975408ae24deaa67205ce684805c25bc0c7937" dependencies = [ "arrayref", "arrayvec", "cc", "cfg-if", "constant_time_eq", + "memmap2", ] [[package]] @@ -826,9 +827,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = 
"bytemuck" @@ -844,9 +845,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9" [[package]] name = "cast" @@ -856,9 +857,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.10" +version = "1.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229" +checksum = "c736e259eea577f443d5c86c304f9f4ae0295c43f3ba05c21f1d66b5f06001af" dependencies = [ "shlex", ] @@ -877,9 +878,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.39" +version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" +checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" dependencies = [ "android-tzdata", "iana-time-zone", @@ -887,7 +888,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -919,9 +920,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.27" +version = "4.5.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "769b0145982b4b48713e01ec42d61614425f27b7058bda7180a3a41f30104796" +checksum = "027bb0d98429ae334a8698531da7077bdf906419543a35a55c2cb1b66437d767" dependencies = [ "clap_builder", "clap_derive", @@ -937,9 +938,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.27" +version = "4.5.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7" +checksum = "5589e0cba072e0f3d23791efac0fd8627b49c829c196a492e88168e6a669d863" dependencies = [ "anstream", "anstyle", @@ -949,23 +950,23 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.43" +version = "4.5.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0952013545c9c6dca60f491602655b795c6c062ab180c9cb0bccb83135461861" +checksum = "f5c5508ea23c5366f77e53f5a0070e5a84e51687ec3ef9e0464c86dc8d13ce98" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.24" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" +checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -1290,9 +1291,9 @@ dependencies = [ [[package]] name = "csv-core" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" +checksum = "7d02f3b0da4c6504f86e9cd789d8dbafab48c2321be74e9987593de5a894d93d" dependencies = [ "memchr", ] @@ -1329,7 +1330,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -1340,7 +1341,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 
2.0.96", + "syn 2.0.98", ] [[package]] @@ -1463,7 +1464,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -1508,14 +1509,14 @@ dependencies = [ "enum-ordinalize", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] name = "either" -version = "1.13.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = "b7914353092ddf589ad78f25c5c1c21b7f80b0ff8621e7c814c3485b5306da9d" [[package]] name = "elliptic-curve" @@ -1566,7 +1567,7 @@ checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -1605,14 +1606,14 @@ checksum = "3bf679796c0322556351f287a51b49e48f7c4986e727b5dd78c972d30e2e16cc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" @@ -1733,12 +1734,12 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.35" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" dependencies = [ "crc32fast", - "miniz_oxide 0.8.3", + "miniz_oxide 0.8.5", ] [[package]] @@ -1849,7 +1850,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -1910,10 +1911,22 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.13.3+wasi-0.2.2", + "windows-targets 0.52.6", +] + [[package]] name = "gimli" version = "0.28.1" @@ -1988,9 +2001,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +checksum = "5017294ff4bb30944501348f6f8e42e6ad28f42c8bbef7a74029aff064a4e3c2" dependencies = [ "atomic-waker", "bytes", @@ -2360,7 +2373,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -2629,7 +2642,7 @@ dependencies = [ "jsonrpsee-types", "parking_lot", "rand", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "thiserror", @@ -2672,7 +2685,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -2786,9 +2799,9 @@ checksum = "82903360c009b816f5ab72a9b68158c27c301ee2c3f20655b55c5e589e7d3bb7" [[package]] name = "libc" -version = "0.2.169" +version = "0.2.170" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" [[package]] name = "libredox" @@ -2809,7 +2822,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.8.0", "libc", - "redox_syscall 0.5.8", + "redox_syscall 0.5.9", ] [[package]] @@ -2848,9 +2861,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.25" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" +checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" [[package]] name = "louds-rs" @@ -2946,9 +2959,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" +checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ "adler2", ] @@ -2961,7 +2974,7 @@ checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.48.0", ] @@ -2972,7 +2985,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.52.0", ] @@ -3283,7 +3296,7 @@ dependencies = [ "build-data", "console_error_panic_hook", "fm", - "getrandom", + "getrandom 0.2.15", "gloo-utils", "js-sys", "nargo", @@ -3325,7 +3338,7 @@ dependencies = [ "acvm", "build-data", "console_error_panic_hook", - "getrandom", + "getrandom 0.2.15", "gloo-utils", "iter-extended", "js-sys", @@ -3614,9 +3627,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.20.2" +version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "oorandom" @@ -3680,7 +3693,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.8", + "redox_syscall 0.5.9", "smallvec", "windows-targets 0.52.6", ] @@ -3783,22 +3796,22 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e2ec53ad785f4d35dac0adea7f7dc6f1bb277ad84a680c7afefeae05d1f5916" +checksum = "dfe2e71e1471fe07709406bf725f710b02927c9c54b2b5b2ec0e8087d97c327d" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d56a66c0c55993aa927429d0f8a0abfd74f084e4d9c192cffed01e418d83eefb" +checksum = "f6e859e6e5bd50440ab63c47e3ebabc90f26251f7c73c3d3e837b74a1cc3fa67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -3956,7 +3969,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "toml_edit 0.22.22", + 
"toml_edit 0.22.24", ] [[package]] @@ -4002,7 +4015,7 @@ checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -4072,7 +4085,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.15", ] [[package]] @@ -4130,9 +4143,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +checksum = "82b568323e98e49e2a0899dcee453dd679fae22d69adf9b11dd508d1549b7e2f" dependencies = [ "bitflags 2.8.0", ] @@ -4149,7 +4162,7 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom", + "getrandom 0.2.15", "libredox 0.1.3", "thiserror", ] @@ -4233,15 +4246,14 @@ dependencies = [ [[package]] name = "ring" -version = "0.17.8" +version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = "da5349ae27d3887ca812fb375b45a4fbb36d8d12d2df394968cd86e35683fe73" dependencies = [ "cc", "cfg-if", - "getrandom", + "getrandom 0.2.15", "libc", - "spin", "untrusted", "windows-sys 0.52.0", ] @@ -4272,7 +4284,7 @@ dependencies = [ "proc-macro2", "quote", "rust-embed-utils", - "syn 2.0.96", + "syn 2.0.98", "walkdir", ] @@ -4300,9 +4312,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" -version = "2.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc_version" @@ -4328,9 +4340,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.21" +version = "0.23.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f287924602bf649d949c63dc8ac8b235fa5387d394020705b80c4eb597ce5b8" +checksum = "47796c98c480fce5406ef69d1c76378375492c3b0a0de587be0c1d9feb12f395" dependencies = [ "log", "once_cell", @@ -4594,9 +4606,9 @@ checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03" [[package]] name = "serde" -version = "1.0.217" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" dependencies = [ "serde_derive", ] @@ -4637,20 +4649,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.217" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] name = "serde_json" -version = "1.0.137" +version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"930cfb6e6abf99298aaad7d29abbef7a9999a9a8806a40088f55f0dcec03146b" +checksum = "44f86c3acccc9c65b153fe1b85a3be07fe5515274ec9f0653b4a0875731c72a6" dependencies = [ "itoa", "memchr", @@ -4666,7 +4678,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -4705,7 +4717,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -4783,9 +4795,9 @@ dependencies = [ [[package]] name = "similar-asserts" -version = "1.6.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f08357795f0d604ea7d7130f22c74b03838c959bdb14adde3142aab4d18a293" +checksum = "b5b441962c817e33508847a22bd82f03a30cff43642dc2fae8b050566121eb9a" dependencies = [ "console", "similar", @@ -4827,9 +4839,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.2" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" dependencies = [ "serde", ] @@ -4954,9 +4966,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "symbolic-common" -version = "12.13.3" +version = "12.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13a4dfe4bbeef59c1f32fc7524ae7c95b9e1de5e79a43ce1604e181081d71b0c" +checksum = "b6189977df1d6ec30c920647919d76f29fb8d8f25e8952e835b0fcda25e8f792" dependencies = [ "debugid", "memmap2", @@ -4966,9 +4978,9 @@ dependencies = [ [[package]] name = "symbolic-demangle" -version = "12.13.3" +version = "12.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98cf6a95abff97de4d7ff3473f33cacd38f1ddccad5c1feab435d6760300e3b6" +checksum = "d234917f7986498e7f62061438cee724bafb483fe84cfbe2486f68dce48240d7" dependencies = [ "cpp_demangle", "rustc-demangle", @@ -4988,9 +5000,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.96" +version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", @@ -5005,7 +5017,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -5016,13 +5028,13 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.15.0" +version = "3.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" +checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230" dependencies = [ "cfg-if", "fastrand", - "getrandom", + "getrandom 0.3.1", "once_cell", "rustix", "windows-sys 0.59.0", @@ -5084,7 +5096,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -5095,7 +5107,7 @@ checksum = "5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", "test-case-core", ] @@ -5137,7 +5149,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies 
= [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -5225,7 +5237,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -5300,13 +5312,13 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.22" +version = "0.22.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ "indexmap 2.7.1", "toml_datetime", - "winnow 0.6.25", + "winnow 0.7.3", ] [[package]] @@ -5368,7 +5380,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -5451,7 +5463,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" dependencies = [ "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -5484,9 +5496,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "unarray" @@ -5496,9 +5508,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicode-ident" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" +checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" [[package]] name = "unicode-linebreak" @@ -5562,9 +5574,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.12.1" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3758f5e68192bb96cc8f9b7e2c2cfdabb435499a28499a42f8f984092adad4b" +checksum = "bd8dcafa1ca14750d8d7a05aa05988c17aab20886e1f3ae33a40223c58d92ef7" [[package]] name = "valuable" @@ -5595,9 +5607,9 @@ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wait-timeout" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" dependencies = [ "libc", ] @@ -5637,6 +5649,15 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasi" +version = "0.13.3+wasi-0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +dependencies = [ + "wit-bindgen-rt", +] + [[package]] name = "wasm-bindgen" version = "0.2.86" @@ -5660,7 +5681,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", "wasm-bindgen-shared", ] @@ -5694,7 +5715,7 @@ checksum = 
"e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5741,9 +5762,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.7" +version = "0.26.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" +checksum = "2210b291f7ea53617fbafcc4939f10914214ec15aace5ba62293a668f322c5c9" dependencies = [ "rustls-pki-types", ] @@ -5788,6 +5809,12 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-link" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3" + [[package]] name = "windows-sys" version = "0.48.0" @@ -5947,13 +5974,22 @@ dependencies = [ [[package]] name = "winnow" -version = "0.6.25" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad699df48212c6cc6eb4435f35500ac6fd3b9913324f938aea302022ce19d310" +checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" dependencies = [ "memchr", ] +[[package]] +name = "wit-bindgen-rt" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +dependencies = [ + "bitflags 2.8.0", +] + [[package]] name = "write16" version = "1.0.0" @@ -5995,7 +6031,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", "synstructure", ] @@ -6017,7 +6053,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -6037,7 +6073,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", "synstructure", ] @@ -6058,7 +6094,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -6080,7 +6116,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index d41ea0939b7..2fda54652d6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -53,7 +53,7 @@ version = "1.0.0-beta.3" # x-release-please-end authors = ["The Noir Team "] edition = "2021" -rust-version = "1.75.0" +rust-version = "1.85.0" license = "MIT OR Apache-2.0" repository = "https://github.com/noir-lang/noir/" diff --git a/README.md b/README.md index c2e41435b66..20f1f80d83a 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ The current focus is to gather as much feedback as possible while in the alpha p ## Minimum Rust version -This workspace's minimum supported rustc version is 1.75.0. +This workspace's minimum supported rustc version is 1.85.0. ## License diff --git a/acvm-repo/acvm/src/pwg/blackbox/hash.rs b/acvm-repo/acvm/src/pwg/blackbox/hash.rs index 7476b0dc2dc..438b3559e8e 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/hash.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/hash.rs @@ -49,9 +49,13 @@ fn get_hash_input( // in the message, then we error. 
if num_bytes_to_take > message_input.len() { return Err(OpcodeResolutionError::BlackBoxFunctionFailed( - acir::BlackBoxFunc::Blake2s, - format!("the number of bytes to take from the message is more than the number of bytes in the message. {} > {}", num_bytes_to_take, message_input.len()), - )); + acir::BlackBoxFunc::Blake2s, + format!( + "the number of bytes to take from the message is more than the number of bytes in the message. {} > {}", + num_bytes_to_take, + message_input.len() + ), + )); } let truncated_message = message_input[0..num_bytes_to_take].to_vec(); Ok(truncated_message) diff --git a/acvm-repo/acvm/src/pwg/brillig.rs b/acvm-repo/acvm/src/pwg/brillig.rs index a635cd92615..9625685c6d1 100644 --- a/acvm-repo/acvm/src/pwg/brillig.rs +++ b/acvm-repo/acvm/src/pwg/brillig.rs @@ -100,7 +100,7 @@ impl<'b, B: BlackBoxFunctionSolver, F: AcirField> BrilligSolver<'b, F, B> { Err(_) => { return Err(OpcodeResolutionError::OpcodeNotSolvable( OpcodeNotSolvable::ExpressionHasTooManyUnknowns(expr.clone()), - )) + )); } }, BrilligInputs::Array(expr_arr) => { @@ -111,7 +111,7 @@ impl<'b, B: BlackBoxFunctionSolver, F: AcirField> BrilligSolver<'b, F, B> { Err(_) => { return Err(OpcodeResolutionError::OpcodeNotSolvable( OpcodeNotSolvable::ExpressionHasTooManyUnknowns(expr.clone()), - )) + )); } } } diff --git a/acvm-repo/acvm/src/pwg/mod.rs b/acvm-repo/acvm/src/pwg/mod.rs index 6e0e28cf81d..8a6ca597b2f 100644 --- a/acvm-repo/acvm/src/pwg/mod.rs +++ b/acvm-repo/acvm/src/pwg/mod.rs @@ -73,6 +73,7 @@ impl std::fmt::Display for ACVMStatus { } } +#[expect(clippy::large_enum_variant)] pub enum StepResult<'a, F, B: BlackBoxFunctionSolver> { Status(ACVMStatus), IntoBrillig(BrilligSolver<'a, F, B>), @@ -142,7 +143,9 @@ pub enum OpcodeResolutionError { }, #[error("Attempted to call `main` with a `Call` opcode")] AcirMainCallAttempted { opcode_location: ErrorLocation }, - #[error("{results_size:?} result values were provided for {outputs_size:?} call output witnesses, most likely due to bad ACIR codegen")] + #[error( + "{results_size:?} result values were provided for {outputs_size:?} call output witnesses, most likely due to bad ACIR codegen" + )] AcirCallOutputsMismatch { opcode_location: ErrorLocation, results_size: u32, outputs_size: u32 }, #[error("(--pedantic): Predicates are expected to be 0 or 1, but found: {pred_value}")] PredicateLargerThanOne { opcode_location: ErrorLocation, pred_value: F }, diff --git a/acvm-repo/acvm_js/src/public_witness.rs b/acvm-repo/acvm_js/src/public_witness.rs index 245d5b4dd0a..a50cb640b63 100644 --- a/acvm-repo/acvm_js/src/public_witness.rs +++ b/acvm-repo/acvm_js/src/public_witness.rs @@ -44,7 +44,11 @@ pub fn get_return_witness( let circuit = match program.functions.len() { 0 => return Ok(JsWitnessMap::from(WitnessMap::new())), 1 => &program.functions[0], - _ => return Err(JsString::from("Program contains multiple circuits however ACVM currently only supports programs containing a single circuit")) + _ => { + return Err(JsString::from( + "Program contains multiple circuits however ACVM currently only supports programs containing a single circuit", + )); + } }; let witness_map = WitnessMap::from(witness_map); @@ -71,7 +75,11 @@ pub fn get_public_parameters_witness( let circuit = match program.functions.len() { 0 => return Ok(JsWitnessMap::from(WitnessMap::new())), 1 => &program.functions[0], - _ => return Err(JsString::from("Program contains multiple circuits however ACVM currently only supports programs containing a single circuit")) + _ => { + return 
Err(JsString::from( + "Program contains multiple circuits however ACVM currently only supports programs containing a single circuit", + )); + } }; let witness_map = WitnessMap::from(solved_witness); @@ -98,7 +106,11 @@ pub fn get_public_witness( let circuit = match program.functions.len() { 0 => return Ok(JsWitnessMap::from(WitnessMap::new())), 1 => &program.functions[0], - _ => return Err(JsString::from("Program contains multiple circuits however ACVM currently only supports programs containing a single circuit")) + _ => { + return Err(JsString::from( + "Program contains multiple circuits however ACVM currently only supports programs containing a single circuit", + )); + } }; let witness_map = WitnessMap::from(solved_witness); diff --git a/acvm-repo/blackbox_solver/src/bigint.rs b/acvm-repo/blackbox_solver/src/bigint.rs index f7be1e80a55..6b70d51d791 100644 --- a/acvm-repo/blackbox_solver/src/bigint.rs +++ b/acvm-repo/blackbox_solver/src/bigint.rs @@ -231,7 +231,10 @@ fn all_allowed_bigint_moduli_are_prime() { Primality::No => panic!("not all allowed_bigint_moduli are prime: {modulus}"), Primality::Probable(probability) => { if probability < 0.90 { - panic!("not all allowed_bigint_moduli are prime within the allowed probability: {} < 0.90", probability); + panic!( + "not all allowed_bigint_moduli are prime within the allowed probability: {} < 0.90", + probability + ); } } } diff --git a/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs b/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs index 3aa735388ca..8a501b7e8a2 100644 --- a/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs +++ b/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs @@ -430,7 +430,7 @@ lazy_static! { }; } -impl<'a> Poseidon2<'a> { +impl Poseidon2<'_> { pub(crate) fn new() -> Self { Poseidon2 { config: &POSEIDON2_CONFIG } } diff --git a/acvm-repo/brillig_vm/src/lib.rs b/acvm-repo/brillig_vm/src/lib.rs index 27759012335..157ed9638ca 100644 --- a/acvm-repo/brillig_vm/src/lib.rs +++ b/acvm-repo/brillig_vm/src/lib.rs @@ -548,69 +548,99 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> VM<'a, F, B> { destinations.iter().zip(destination_value_types).zip(&values) { match (destination, value_type) { - (ValueOrArray::MemoryAddress(value_index), HeapValueType::Simple(bit_size)) => { - match output { - ForeignCallParam::Single(value) => { - self.write_value_to_memory(*value_index, value, *bit_size)?; - } - _ => return Err(format!( - "Function result size does not match brillig bytecode. 
Expected 1 result but got {output:?}") - ), - } - } - ( - ValueOrArray::HeapArray(HeapArray { pointer: pointer_index, size }), - HeapValueType::Array { value_types, size: type_size }, - ) if size == type_size => { - if HeapValueType::all_simple(value_types) { + (ValueOrArray::MemoryAddress(value_index), HeapValueType::Simple(bit_size)) => { match output { - ForeignCallParam::Array(values) => { - if values.len() != *size { - // foreign call returning flattened values into a nested type, so the sizes do not match - let destination = self.memory.read_ref(*pointer_index); - let return_type = value_type; - let mut flatten_values_idx = 0; //index of values read from flatten_values - self.write_slice_of_values_to_memory(destination, &output.fields(), &mut flatten_values_idx, return_type)?; - } else { - self.write_values_to_memory_slice(*pointer_index, values, value_types)?; - } + ForeignCallParam::Single(value) => { + self.write_value_to_memory(*value_index, value, *bit_size)?; } _ => { - return Err("Function result size does not match brillig bytecode size".to_string()); + return Err(format!( + "Function result size does not match brillig bytecode. Expected 1 result but got {output:?}" + )); } } - } else { - // foreign call returning flattened values into a nested type, so the sizes do not match - let destination = self.memory.read_ref(*pointer_index); - let return_type = value_type; - let mut flatten_values_idx = 0; //index of values read from flatten_values - self.write_slice_of_values_to_memory(destination, &output.fields(), &mut flatten_values_idx, return_type)?; - } - } - ( - ValueOrArray::HeapVector(HeapVector {pointer: pointer_index, size: size_index }), - HeapValueType::Vector { value_types }, - ) => { - if HeapValueType::all_simple(value_types) { - match output { - ForeignCallParam::Array(values) => { - // Set our size in the size address - self.memory.write(*size_index, values.len().into()); - self.write_values_to_memory_slice(*pointer_index, values, value_types)?; - + } + ( + ValueOrArray::HeapArray(HeapArray { pointer: pointer_index, size }), + HeapValueType::Array { value_types, size: type_size }, + ) if size == type_size => { + if HeapValueType::all_simple(value_types) { + match output { + ForeignCallParam::Array(values) => { + if values.len() != *size { + // foreign call returning flattened values into a nested type, so the sizes do not match + let destination = self.memory.read_ref(*pointer_index); + let return_type = value_type; + let mut flatten_values_idx = 0; //index of values read from flatten_values + self.write_slice_of_values_to_memory( + destination, + &output.fields(), + &mut flatten_values_idx, + return_type, + )?; + } else { + self.write_values_to_memory_slice( + *pointer_index, + values, + value_types, + )?; + } + } + _ => { + return Err( + "Function result size does not match brillig bytecode size" + .to_string(), + ); + } } - _ => { - return Err("Function result size does not match brillig bytecode size".to_string()); + } else { + // foreign call returning flattened values into a nested type, so the sizes do not match + let destination = self.memory.read_ref(*pointer_index); + let return_type = value_type; + let mut flatten_values_idx = 0; //index of values read from flatten_values + self.write_slice_of_values_to_memory( + destination, + &output.fields(), + &mut flatten_values_idx, + return_type, + )?; + } + } + ( + ValueOrArray::HeapVector(HeapVector { + pointer: pointer_index, + size: size_index, + }), + HeapValueType::Vector { value_types }, + ) => { + if 
HeapValueType::all_simple(value_types) { + match output { + ForeignCallParam::Array(values) => { + // Set our size in the size address + self.memory.write(*size_index, values.len().into()); + self.write_values_to_memory_slice( + *pointer_index, + values, + value_types, + )?; + } + _ => { + return Err( + "Function result size does not match brillig bytecode size" + .to_string(), + ); + } } + } else { + unimplemented!("deflattening heap vectors from foreign calls"); } - } else { - unimplemented!("deflattening heap vectors from foreign calls"); + } + _ => { + return Err(format!( + "Unexpected value type {value_type:?} for destination {destination:?}" + )); } } - _ => { - return Err(format!("Unexpected value type {value_type:?} for destination {destination:?}")); - } - } } let _ = diff --git a/acvm-repo/brillig_vm/src/memory.rs b/acvm-repo/brillig_vm/src/memory.rs index 73443384efa..5c04858fea7 100644 --- a/acvm-repo/brillig_vm/src/memory.rs +++ b/acvm-repo/brillig_vm/src/memory.rs @@ -18,7 +18,9 @@ pub enum MemoryValue { #[derive(Debug, thiserror::Error)] pub enum MemoryTypeError { - #[error("Bit size for value {value_bit_size} does not match the expected bit size {expected_bit_size}")] + #[error( + "Bit size for value {value_bit_size} does not match the expected bit size {expected_bit_size}" + )] MismatchedBitSize { value_bit_size: u32, expected_bit_size: u32 }, } diff --git a/compiler/noirc_evaluator/src/acir/acir_variable.rs b/compiler/noirc_evaluator/src/acir/acir_variable.rs index edd5c8e649c..a19edd3eb90 100644 --- a/compiler/noirc_evaluator/src/acir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/acir/acir_variable.rs @@ -80,7 +80,7 @@ impl From for AcirType { } } -impl<'a> From<&'a SsaType> for AcirType { +impl From<&SsaType> for AcirType { fn from(value: &SsaType) -> Self { match value { SsaType::Numeric(numeric_type) => AcirType::NumericType(*numeric_type), @@ -278,7 +278,7 @@ impl> AcirContext { let var_data = match self.vars.get(&var) { Some(var_data) => var_data, None => { - return Err(InternalError::UndeclaredAcirVar { call_stack: self.get_call_stack() }) + return Err(InternalError::UndeclaredAcirVar { call_stack: self.get_call_stack() }); } }; Ok(var_data.to_expression().into_owned()) diff --git a/compiler/noirc_evaluator/src/acir/black_box.rs b/compiler/noirc_evaluator/src/acir/black_box.rs index 7b386d6c188..0f7eba69150 100644 --- a/compiler/noirc_evaluator/src/acir/black_box.rs +++ b/compiler/noirc_evaluator/src/acir/black_box.rs @@ -35,7 +35,7 @@ impl> AcirContext { name: "poseidon_2_permutation call".to_string(), arg: "length".to_string(), call_stack: self.get_call_stack(), - })) + })); } }; @@ -45,7 +45,7 @@ impl> AcirContext { return Err(RuntimeError::InternalError(InternalError::NotAConstant { name: "length".to_string(), call_stack: self.get_call_stack(), - })) + })); } }; @@ -160,7 +160,7 @@ impl> AcirContext { name: "verify proof".to_string(), arg: "proof type".to_string(), call_stack: self.get_call_stack(), - })) + })); } }; @@ -170,7 +170,7 @@ impl> AcirContext { return Err(RuntimeError::InternalError(InternalError::NotAConstant { name: "proof type".to_string(), call_stack: self.get_call_stack(), - })) + })); } }; diff --git a/compiler/noirc_evaluator/src/acir/generated_acir.rs b/compiler/noirc_evaluator/src/acir/generated_acir.rs index 141c9f367c2..e20028e56df 100644 --- a/compiler/noirc_evaluator/src/acir/generated_acir.rs +++ b/compiler/noirc_evaluator/src/acir/generated_acir.rs @@ -788,7 +788,10 @@ fn intrinsics_check_inputs(name: BlackBoxFunc, input_count: 
usize) { None => return, }; - assert_eq!(expected_num_inputs,input_count,"Tried to call black box function {name} with {input_count} inputs, but this function's definition requires {expected_num_inputs} inputs"); + assert_eq!( + expected_num_inputs, input_count, + "Tried to call black box function {name} with {input_count} inputs, but this function's definition requires {expected_num_inputs} inputs" + ); } /// Checks that the number of outputs being used to call the blackbox function @@ -818,5 +821,8 @@ fn intrinsics_check_outputs(name: BlackBoxFunc, output_count: usize) { None => return, }; - assert_eq!(expected_num_outputs,output_count,"Tried to call black box function {name} with {output_count} outputs, but this function's definition requires {expected_num_outputs} outputs"); + assert_eq!( + expected_num_outputs, output_count, + "Tried to call black box function {name} with {output_count} outputs, but this function's definition requires {expected_num_outputs} outputs" + ); } diff --git a/compiler/noirc_evaluator/src/acir/mod.rs b/compiler/noirc_evaluator/src/acir/mod.rs index f4d771d96d9..ac16afb47f0 100644 --- a/compiler/noirc_evaluator/src/acir/mod.rs +++ b/compiler/noirc_evaluator/src/acir/mod.rs @@ -404,11 +404,15 @@ impl<'a> Context<'a> { match inline_type { InlineType::Inline | InlineType::InlineAlways => { if function.id() != ssa.main_id { - panic!("ACIR function should have been inlined earlier if not marked otherwise"); + panic!( + "ACIR function should have been inlined earlier if not marked otherwise" + ); } } InlineType::NoPredicates => { - panic!("All ACIR functions marked with #[no_predicates] should be inlined before ACIR gen. This is an SSA exclusive codegen attribute"); + panic!( + "All ACIR functions marked with #[no_predicates] should be inlined before ACIR gen. This is an SSA exclusive codegen attribute" + ); } InlineType::Fold => {} } @@ -863,7 +867,11 @@ impl<'a> Context<'a> { let func = &ssa.functions[id]; match func.runtime() { RuntimeType::Acir(inline_type) => { - assert!(!matches!(inline_type, InlineType::Inline), "ICE: Got an ACIR function named {} that should have already been inlined", func.name()); + assert!( + !matches!(inline_type, InlineType::Inline), + "ICE: Got an ACIR function named {} that should have already been inlined", + func.name() + ); let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg)); let output_count = result_ids @@ -874,7 +882,9 @@ impl<'a> Context<'a> { .sum(); let Some(acir_function_id) = ssa.get_entry_point_index(id) else { - unreachable!("Expected an associated final index for call to acir function {id} with args {arguments:?}"); + unreachable!( + "Expected an associated final index for call to acir function {id} with args {arguments:?}" + ); }; let output_vars = self.acir_context.call_acir_function( @@ -956,7 +966,11 @@ impl<'a> Context<'a> { }; // Compiler sanity check - assert_eq!(result_ids.len(), output_values.len(), "ICE: The number of Brillig output values should match the result ids in SSA"); + assert_eq!( + result_ids.len(), + output_values.len(), + "ICE: The number of Brillig output values should match the result ids in SSA" + ); self.handle_ssa_call_outputs(result_ids, output_values, dfg)?; } @@ -1076,7 +1090,7 @@ impl<'a> Context<'a> { found: format!("Instead got {:?}", dfg[instruction]), call_stack: self.acir_context.get_call_stack(), } - .into()) + .into()); } }; // Ensure that array id is fully resolved. 
@@ -1477,7 +1491,7 @@ impl<'a> Context<'a> { found: format!("Instead got {:?}", dfg[instruction]), call_stack: self.acir_context.get_call_stack(), } - .into()) + .into()); } }; @@ -1525,7 +1539,11 @@ impl<'a> Context<'a> { let value_types = self.convert_value(array, dfg).flat_numeric_types(); // Compiler sanity check - assert_eq!(value_types.len(), array_len, "ICE: The length of the flattened type array should match the length of the dynamic array"); + assert_eq!( + value_types.len(), + array_len, + "ICE: The length of the flattened type array should match the length of the dynamic array" + ); let result_value = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, @@ -1675,7 +1693,7 @@ impl<'a> Context<'a> { found: format!("{:?}", array_acir_value), call_stack: self.acir_context.get_call_stack(), } - .into()) + .into()); } } } @@ -1685,7 +1703,7 @@ impl<'a> Context<'a> { found: format!("{:?}", &dfg[array_id]), call_stack: self.acir_context.get_call_stack(), } - .into()) + .into()); } }; } @@ -2179,7 +2197,9 @@ impl<'a> Context<'a> { Ok(self.convert_vars_to_values(vars, dfg, result_ids)) } Intrinsic::ApplyRangeConstraint => { - unreachable!("ICE: `Intrinsic::ApplyRangeConstraint` calls should be transformed into an `Instruction::RangeCheck`"); + unreachable!( + "ICE: `Intrinsic::ApplyRangeConstraint` calls should be transformed into an `Instruction::RangeCheck`" + ); } Intrinsic::ToRadix(endian) => { let field = self.convert_value(arguments[0], dfg).into_var()?; diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen.rs index 957ebc2b069..141b4f58408 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen.rs @@ -88,7 +88,7 @@ pub(crate) fn gen_brillig_for( return Err(InternalError::General { message: format!("Cannot find linked fn {unresolved_fn_label}"), call_stack: CallStack::new(), - }) + }); } }; entry_point.link_with(artifact); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 1fc39b58223..82414b760cf 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -326,7 +326,9 @@ pub(crate) fn convert_black_box_call { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 40dd825be35..1b34cbecfc2 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -460,7 +460,9 @@ impl<'block, Registers: RegisterAllocator> BrilligBlock<'block, Registers> { element_size, ); } else { - unreachable!("ICE: a vector must be preceded by a register containing its length"); + unreachable!( + "ICE: a vector must be preceded by a register containing its length" + ); } self.brillig_context.deallocate_heap_vector(*heap_vector); } @@ -1432,7 +1434,9 @@ impl<'block, Registers: RegisterAllocator> BrilligBlock<'block, Registers> { NumericType::Unsigned { .. } => (false, false), NumericType::NativeField => (true, false), }, - _ => unreachable!("only numeric types are allowed in binary operations. References are handled separately"), + _ => unreachable!( + "only numeric types are allowed in binary operations. 
References are handled separately" + ), }; let brillig_binary_op = match binary.operator { @@ -1991,14 +1995,21 @@ impl<'block, Registers: RegisterAllocator> BrilligBlock<'block, Registers> { self.allocate_foreign_call_result_array(element_type, inner_array); // We add one since array.pointer points to [RC, ...items] - let idx = - self.brillig_context.make_usize_constant_instruction((index + 1).into() ); - self.brillig_context.codegen_store_with_offset(array.pointer, idx, inner_array.pointer); + let idx = self + .brillig_context + .make_usize_constant_instruction((index + 1).into()); + self.brillig_context.codegen_store_with_offset( + array.pointer, + idx, + inner_array.pointer, + ); self.brillig_context.deallocate_single_addr(idx); self.brillig_context.deallocate_register(inner_array.pointer); } - Type::Slice(_) => unreachable!("ICE: unsupported slice type in allocate_nested_array(), expects an array or a numeric type"), + Type::Slice(_) => unreachable!( + "ICE: unsupported slice type in allocate_nested_array(), expects an array or a numeric type" + ), _ => (), } index += 1; diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs index 99645f84ed3..b15259da072 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs @@ -8,7 +8,7 @@ use crate::brillig::brillig_ir::{ use super::brillig_block::BrilligBlock; -impl<'block, Registers: RegisterAllocator> BrilligBlock<'block, Registers> { +impl BrilligBlock<'_, Registers> { fn write_variables(&mut self, write_pointer: MemoryAddress, variables: &[BrilligVariable]) { for (index, variable) in variables.iter().enumerate() { self.brillig_context.store_instruction(write_pointer, variable.extract_register()); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs index c9223715042..7f510751cb1 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs @@ -304,25 +304,37 @@ impl BrilligArtifact { let jump_instruction = self.byte_code[*location_of_jump].clone(); match jump_instruction { BrilligOpcode::Jump { location } => { - assert_eq!(location, 0, "location is not zero, which means that the jump label does not need resolving"); + assert_eq!( + location, 0, + "location is not zero, which means that the jump label does not need resolving" + ); self.byte_code[*location_of_jump] = BrilligOpcode::Jump { location: resolved_location }; } BrilligOpcode::JumpIfNot { condition, location } => { - assert_eq!(location, 0, "location is not zero, which means that the jump label does not need resolving"); + assert_eq!( + location, 0, + "location is not zero, which means that the jump label does not need resolving" + ); self.byte_code[*location_of_jump] = BrilligOpcode::JumpIfNot { condition, location: resolved_location }; } BrilligOpcode::JumpIf { condition, location } => { - assert_eq!(location, 0, "location is not zero, which means that the jump label does not need resolving"); + assert_eq!( + location, 0, + "location is not zero, which means that the jump label does not need resolving" + ); self.byte_code[*location_of_jump] = BrilligOpcode::JumpIf { condition, location: resolved_location }; } BrilligOpcode::Call { location } => { - assert_eq!(location, 0, "location is not zero, which means that the call label 
does not need resolving"); + assert_eq!( + location, 0, + "location is not zero, which means that the call label does not need resolving" + ); self.byte_code[*location_of_jump] = BrilligOpcode::Call { location: resolved_location }; diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index e1d46e9a81f..202124f7931 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -34,7 +34,9 @@ pub enum RuntimeError { UnInitialized { name: String, call_stack: CallStack }, #[error("Integer sized {num_bits:?} is over the max supported size of {max_num_bits:?}")] UnsupportedIntegerSize { num_bits: u32, max_num_bits: u32, call_stack: CallStack }, - #[error("Integer {value}, sized {num_bits:?}, is over the max supported size of {max_num_bits:?} for the blackbox function's inputs")] + #[error( + "Integer {value}, sized {num_bits:?}, is over the max supported size of {max_num_bits:?} for the blackbox function's inputs" + )] InvalidBlackBoxInputBitSize { value: String, num_bits: u32, @@ -59,7 +61,9 @@ pub enum RuntimeError { UnconstrainedSliceReturnToConstrained { call_stack: CallStack }, #[error("All `oracle` methods should be wrapped in an unconstrained fn")] UnconstrainedOracleReturnToConstrained { call_stack: CallStack }, - #[error("Could not resolve some references to the array. All references must be resolved at compile time")] + #[error( + "Could not resolve some references to the array. All references must be resolved at compile time" + )] UnknownReference { call_stack: CallStack }, } diff --git a/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs b/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs index 88b4fb18fb3..dee10dfbecf 100644 --- a/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs +++ b/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs @@ -431,8 +431,10 @@ impl DependencyContext { if let Some(value_id) = self.memory_slots.get(address) { self.update_children(&[*value_id], &results); } else { - panic!("load instruction {} has attempted to access previously unused memory location", - instruction); + panic!( + "load instruction {} has attempted to access previously unused memory location", + instruction + ); } } // Record the condition to set as future parent for the following values @@ -502,7 +504,10 @@ impl DependencyContext { RuntimeType::Brillig(..) => {} }, Value::ForeignFunction(..) => { - panic!("should not be able to reach foreign function from non-Brillig functions, {func_id} in function {}", function.name()); + panic!( + "should not be able to reach foreign function from non-Brillig functions, {func_id} in function {}", + function.name() + ); } Value::Instruction { .. } | Value::NumericConstant { .. } @@ -826,13 +831,18 @@ impl Context { } }, Value::ForeignFunction(..) => { - panic!("Should not be able to reach foreign function from non-Brillig functions, {func_id} in function {}", function.name()); + panic!( + "Should not be able to reach foreign function from non-Brillig functions, {func_id} in function {}", + function.name() + ); } Value::Instruction { .. } | Value::NumericConstant { .. } | Value::Param { .. 
} | Value::Global(_) => { - panic!("At the point we are running disconnect there shouldn't be any other values as arguments") + panic!( + "At the point we are running disconnect there shouldn't be any other values as arguments" + ) } } } diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index e4c00358c8c..c2a3c346d28 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -85,7 +85,11 @@ impl FunctionBuilder { /// This should only be used immediately following construction of a FunctionBuilder /// and will panic if there are any already finished functions. pub(crate) fn set_runtime(&mut self, runtime: RuntimeType) { - assert_eq!(self.finished_functions.len(), 0, "Attempted to set runtime on a FunctionBuilder with finished functions. A FunctionBuilder's runtime should only be set on its initial function"); + assert_eq!( + self.finished_functions.len(), + 0, + "Attempted to set runtime on a FunctionBuilder with finished functions. A FunctionBuilder's runtime should only be set on its initial function" + ); self.current_function.set_runtime(runtime); } diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index adf0bc23b98..d78d5c5ebb7 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -340,14 +340,19 @@ impl DataFlowGraph { } } let mut instructions = instructions.unwrap_or(vec![instruction]); - assert!(!instructions.is_empty(), "`SimplifyResult::SimplifiedToInstructionMultiple` must not return empty vector"); + assert!( + !instructions.is_empty(), + "`SimplifyResult::SimplifiedToInstructionMultiple` must not return empty vector" + ); if instructions.len() > 1 { // There's currently no way to pass results from one instruction in `instructions` on to the next. // We then restrict this to only support multiple instructions if they're all `Instruction::Constrain` // as this instruction type does not have any results. 
assert!( - instructions.iter().all(|instruction| matches!(instruction, Instruction::Constrain(..))), + instructions + .iter() + .all(|instruction| matches!(instruction, Instruction::Constrain(..))), "`SimplifyResult::SimplifiedToInstructionMultiple` only supports `Constrain` instructions" ); } @@ -884,7 +889,7 @@ impl<'dfg> InsertInstructionResult<'dfg> { } } -impl<'dfg> std::ops::Index for InsertInstructionResult<'dfg> { +impl std::ops::Index for InsertInstructionResult<'_> { type Output = ValueId; fn index(&self, index: usize) -> &Self::Output { diff --git a/compiler/noirc_evaluator/src/ssa/ir/dom.rs b/compiler/noirc_evaluator/src/ssa/ir/dom.rs index 3dde6240e18..49862b49be3 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dom.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dom.rs @@ -136,10 +136,7 @@ impl DominatorTree { if let Some(value) = f(block_id) { return Some(value); } - block_id = match self.immediate_dominator(block_id) { - Some(immediate_dominator) => immediate_dominator, - None => return None, - } + block_id = self.immediate_dominator(block_id)?; } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs index 75fd53c22a3..a67c2d7a0f8 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs @@ -140,11 +140,11 @@ impl Binary { }; } - let lhs_is_zero = lhs_value.map_or(false, |lhs| lhs.is_zero()); - let rhs_is_zero = rhs_value.map_or(false, |rhs| rhs.is_zero()); + let lhs_is_zero = lhs_value.is_some_and(|lhs| lhs.is_zero()); + let rhs_is_zero = rhs_value.is_some_and(|rhs| rhs.is_zero()); - let lhs_is_one = lhs_value.map_or(false, |lhs| lhs.is_one()); - let rhs_is_one = rhs_value.map_or(false, |rhs| rhs.is_one()); + let lhs_is_one = lhs_value.is_some_and(|lhs| lhs.is_one()); + let rhs_is_one = rhs_value.is_some_and(|rhs| rhs.is_one()); match self.operator { BinaryOp::Add { .. } => { diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 6ee7aa0192c..cd83dd9ba78 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -170,7 +170,7 @@ pub(super) fn simplify_call( } Intrinsic::SlicePopBack => { let length = dfg.get_numeric_constant(arguments[0]); - if length.map_or(true, |length| length.is_zero()) { + if length.is_none_or(|length| length.is_zero()) { // If the length is zero then we're trying to pop the last element from an empty slice. // Defer the error to acir_gen. return SimplifyResult::None; @@ -185,7 +185,7 @@ pub(super) fn simplify_call( } Intrinsic::SlicePopFront => { let length = dfg.get_numeric_constant(arguments[0]); - if length.map_or(true, |length| length.is_zero()) { + if length.is_none_or(|length| length.is_zero()) { // If the length is zero then we're trying to pop the first element from an empty slice. // Defer the error to acir_gen. return SimplifyResult::None; @@ -243,7 +243,7 @@ pub(super) fn simplify_call( } Intrinsic::SliceRemove => { let length = dfg.get_numeric_constant(arguments[0]); - if length.map_or(true, |length| length.is_zero()) { + if length.is_none_or(|length| length.is_zero()) { // If the length is zero then we're trying to remove an element from an empty slice. // Defer the error to acir_gen. 
return SimplifyResult::None; @@ -718,10 +718,10 @@ fn simplify_derive_generators( ); let is_infinite = dfg.make_constant(FieldElement::zero(), NumericType::bool()); let mut results = Vec::new(); - for gen in generators { - let x_big: BigUint = gen.x.into(); + for generator in generators { + let x_big: BigUint = generator.x.into(); let x = FieldElement::from_be_bytes_reduce(&x_big.to_bytes_be()); - let y_big: BigUint = gen.y.into(); + let y_big: BigUint = generator.y.into(); let y = FieldElement::from_be_bytes_reduce(&y_big.to_bytes_be()); results.push(dfg.make_constant(x, NumericType::NativeField)); results.push(dfg.make_constant(y, NumericType::NativeField)); diff --git a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs index 05ceafcf450..ec648131547 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs @@ -39,7 +39,11 @@ impl Function { let reachable_blocks = self.reachable_blocks(); if !self.runtime().is_entry_point() { - assert_eq!(reachable_blocks.len(), 1, "Expected there to be 1 block remaining in Acir function for array_set optimization"); + assert_eq!( + reachable_blocks.len(), + 1, + "Expected there to be 1 block remaining in Acir function for array_set optimization" + ); } let mut context = Context::new(&self.dfg); diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 76f8495c009..1a9f4b6b622 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -262,7 +262,7 @@ fn flatten_function_cfg(function: &mut Function, no_predicates: &HashMap Context<'f> { +impl Context<'_> { fn flatten(&mut self, no_predicates: &HashMap) { // Flatten the CFG by inlining all instructions from the queued blocks // until all blocks have been flattened. diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs index 78091285208..7aad174d327 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs @@ -102,7 +102,9 @@ impl<'cfg> Context<'cfg> { } else if successors.len() == 1 { self.find_join_point(successors.next().unwrap()) } else if successors.len() == 0 { - unreachable!("return encountered before a join point was found. This can only happen if early-return was added to the language without implementing it by jmping to a join block first") + unreachable!( + "return encountered before a join point was found. 
This can only happen if early-return was added to the language without implementing it by jmping to a join block first" + ) } else { unreachable!("A block can only have 0, 1, or 2 successors"); } diff --git a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index e5753aeba4e..ba0c998216c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -278,7 +278,9 @@ impl InlineContext { if self.recursion_level > RECURSION_LIMIT { panic!( - "Attempted to recur more than {RECURSION_LIMIT} times during inlining function '{}':\n{}", source_function.name(), source_function + "Attempted to recur more than {RECURSION_LIMIT} times during inlining function '{}':\n{}", + source_function.name(), + source_function ); } @@ -349,10 +351,14 @@ impl<'function> PerFunctionContext<'function> { return id; } } - unreachable!("All Value::Instructions should already be known during inlining after creating the original inlined instruction. Unknown value {id} = {value:?}") + unreachable!( + "All Value::Instructions should already be known during inlining after creating the original inlined instruction. Unknown value {id} = {value:?}" + ) } value @ Value::Param { .. } => { - unreachable!("All Value::Params should already be known from previous calls to translate_block. Unknown value {id} = {value:?}") + unreachable!( + "All Value::Params should already be known from previous calls to translate_block. Unknown value {id} = {value:?}" + ) } Value::NumericConstant { constant, typ } => { // The dfg indexes a global's inner value directly, so we need to check here diff --git a/compiler/noirc_evaluator/src/ssa/opt/make_constrain_not_equal.rs b/compiler/noirc_evaluator/src/ssa/opt/make_constrain_not_equal.rs index 21f536eba2d..28e59f92429 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/make_constrain_not_equal.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/make_constrain_not_equal.rs @@ -39,7 +39,7 @@ impl Function { if self .dfg .get_numeric_constant(*rhs) - .map_or(false, |constant| constant.is_zero()) + .is_some_and(|constant| constant.is_zero()) { if let Value::Instruction { instruction, .. } = &self.dfg[self.dfg.resolve(*lhs)] diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index ce76825877a..b4b2dcb2e44 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -201,7 +201,7 @@ impl<'f> PerFunctionContext<'f> { let is_dereference = block .expressions .get(store_address) - .map_or(false, |expression| matches!(expression, Expression::Dereference(_))); + .is_some_and(|expression| matches!(expression, Expression::Dereference(_))); if !self.last_loads.contains_key(store_address) && !store_alias_used diff --git a/compiler/noirc_evaluator/src/ssa/opt/pure.rs b/compiler/noirc_evaluator/src/ssa/opt/pure.rs index d790d035eb0..b975413fc2e 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/pure.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/pure.rs @@ -171,7 +171,7 @@ impl Function { | Value::Instruction { .. } | Value::Param { .. } | Value::NumericConstant { .. 
} => { - return (Purity::Impure, BTreeSet::new()) + return (Purity::Impure, BTreeSet::new()); } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index 942fe67b5d5..a2e66ccc616 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -92,7 +92,7 @@ impl Context { let condition_is_one = function .dfg .get_numeric_constant(*condition) - .map_or(false, |condition| condition.is_one()); + .is_some_and(|condition| condition.is_one()); if condition_is_one { new_instructions.push(instruction_id); last_side_effects_enabled_instruction = None; diff --git a/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs index 22fdf0a7987..e3cf3139d1c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs @@ -254,7 +254,9 @@ fn remove_block_parameters( let jump_args = match function.dfg[predecessor].unwrap_terminator_mut() { TerminatorInstruction::Jmp { arguments, .. } => std::mem::take(arguments), - TerminatorInstruction::JmpIf { .. } => unreachable!("If jmpif instructions are modified to support block arguments in the future, this match will need to be updated"), + TerminatorInstruction::JmpIf { .. } => unreachable!( + "If jmpif instructions are modified to support block arguments in the future, this match will need to be updated" + ), _ => unreachable!( "Predecessor was already validated to have only a single jmp destination" ), diff --git a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index 547a8a042c6..f92d7ab4b15 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -490,9 +490,19 @@ impl Loop { context.inline_instructions_from_block(); // Mutate the terminator if possible so that it points at the iteration block. match context.dfg()[fresh_block].unwrap_terminator() { - TerminatorInstruction::JmpIf { condition, then_destination, else_destination, call_stack } => { + TerminatorInstruction::JmpIf { + condition, + then_destination, + else_destination, + call_stack, + } => { let condition = *condition; - let next_blocks = context.handle_jmpif(condition, *then_destination, *else_destination, *call_stack); + let next_blocks = context.handle_jmpif( + condition, + *then_destination, + *else_destination, + *call_stack, + ); // If there is only 1 next block the jmpif evaluated to a single known block. // This is the expected case and lets us know if we should loop again or not. 
@@ -515,7 +525,9 @@ impl Loop { Err(context.inserter.function.dfg.get_value_call_stack(condition)) } } - other => unreachable!("Expected loop header to terminate in a JmpIf to the loop body, but found {other:?} instead"), + other => unreachable!( + "Expected loop header to terminate in a JmpIf to the loop body, but found {other:?} instead" + ), } } diff --git a/compiler/noirc_evaluator/src/ssa/parser/lexer.rs b/compiler/noirc_evaluator/src/ssa/parser/lexer.rs index e22b6a661de..e6e15d1559d 100644 --- a/compiler/noirc_evaluator/src/ssa/parser/lexer.rs +++ b/compiler/noirc_evaluator/src/ssa/parser/lexer.rs @@ -171,7 +171,7 @@ impl<'a> Lexer<'a> { return Err(LexerError::InvalidIntegerLiteral { span: Span::inclusive(start, end), found: integer_str, - }) + }); } }; diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index bc62964cd49..a954ac3ab93 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -130,7 +130,7 @@ pub(crate) fn generate_ssa(program: Program) -> Result { Ok(ssa) } -impl<'a> FunctionContext<'a> { +impl FunctionContext<'_> { /// Codegen a function's body and set its return value to that of its last parameter. /// For functions returning nothing, this will be an empty list. fn codegen_function_body(&mut self, body: &Expression) -> Result<(), RuntimeError> { diff --git a/compiler/noirc_frontend/src/elaborator/enums.rs b/compiler/noirc_frontend/src/elaborator/enums.rs index 9e9df83a4db..3ee95355bc4 100644 --- a/compiler/noirc_frontend/src/elaborator/enums.rs +++ b/compiler/noirc_frontend/src/elaborator/enums.rs @@ -638,7 +638,7 @@ impl Elaborator<'_> { self.push_tests_against_bare_variables(&mut rows); // If the first row is a match-all we match it and the remaining rows are ignored. 
- if rows.first().map_or(false, |row| row.columns.is_empty()) { + if rows.first().is_some_and(|row| row.columns.is_empty()) { let row = rows.remove(0); return Ok(match row.guard { diff --git a/compiler/noirc_frontend/src/elaborator/expressions.rs b/compiler/noirc_frontend/src/elaborator/expressions.rs index 3c42478dfab..23164c57080 100644 --- a/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -39,7 +39,7 @@ use crate::{ use super::{Elaborator, LambdaContext, UnsafeBlockStatus}; -impl<'context> Elaborator<'context> { +impl Elaborator<'_> { pub(crate) fn elaborate_expression(&mut self, expr: Expression) -> (ExprId, Type) { self.elaborate_expression_with_target_type(expr, None) } @@ -59,7 +59,7 @@ impl<'context> Elaborator<'context> { ExpressionKind::Constrain(constrain) => self.elaborate_constrain(constrain), ExpressionKind::Constructor(constructor) => self.elaborate_constructor(*constructor), ExpressionKind::MemberAccess(access) => { - return self.elaborate_member_access(*access, expr.location) + return self.elaborate_member_access(*access, expr.location); } ExpressionKind::Cast(cast) => self.elaborate_cast(*cast, expr.location), ExpressionKind::Infix(infix) => return self.elaborate_infix(*infix, expr.location), @@ -71,11 +71,11 @@ impl<'context> Elaborator<'context> { self.elaborate_lambda_with_target_type(*lambda, target_type) } ExpressionKind::Parenthesized(expr) => { - return self.elaborate_expression_with_target_type(*expr, target_type) + return self.elaborate_expression_with_target_type(*expr, target_type); } ExpressionKind::Quote(quote) => self.elaborate_quote(quote, expr.location), ExpressionKind::Comptime(comptime, _) => { - return self.elaborate_comptime_block(comptime, expr.location, target_type) + return self.elaborate_comptime_block(comptime, expr.location, target_type); } ExpressionKind::Unsafe(unsafe_expression) => { self.elaborate_unsafe_block(unsafe_expression, target_type) @@ -476,7 +476,7 @@ impl<'context> Elaborator<'context> { } else { return self .call_macro(func, comptime_args, location, typ) - .unwrap_or_else(|| (HirExpression::Error, Type::Error)); + .unwrap_or((HirExpression::Error, Type::Error)); } } @@ -594,7 +594,7 @@ impl<'context> Elaborator<'context> { let args = function_call.arguments.clone(); return self .call_macro(function_call.func, args, location, typ) - .unwrap_or_else(|| (HirExpression::Error, Type::Error)); + .unwrap_or((HirExpression::Error, Type::Error)); } } (HirExpression::Call(function_call), typ) diff --git a/compiler/noirc_frontend/src/elaborator/lints.rs b/compiler/noirc_frontend/src/elaborator/lints.rs index 52cf130bc37..c60d41b85e2 100644 --- a/compiler/noirc_frontend/src/elaborator/lints.rs +++ b/compiler/noirc_frontend/src/elaborator/lints.rs @@ -67,7 +67,7 @@ pub(super) fn low_level_function_outside_stdlib( crate_id: CrateId, ) -> Option { let is_low_level_function = - modifiers.attributes.function().map_or(false, |func| func.is_low_level()); + modifiers.attributes.function().is_some_and(|func| func.is_low_level()); if !crate_id.is_stdlib() && is_low_level_function { let ident = func_meta_name_ident(func, modifiers); Some(ResolverError::LowLevelFunctionOutsideOfStdlib { ident }) @@ -81,7 +81,7 @@ pub(super) fn oracle_not_marked_unconstrained( func: &FuncMeta, modifiers: &FunctionModifiers, ) -> Option { - let is_oracle_function = modifiers.attributes.function().map_or(false, |func| func.is_oracle()); + let is_oracle_function = 
modifiers.attributes.function().is_some_and(|func| func.is_oracle()); if is_oracle_function && !modifiers.is_unconstrained { let ident = func_meta_name_ident(func, modifiers); Some(ResolverError::OracleMarkedAsConstrained { ident }) @@ -104,7 +104,7 @@ pub(super) fn oracle_called_from_constrained_function( } let function_attributes = interner.function_attributes(called_func); - let is_oracle_call = function_attributes.function().map_or(false, |func| func.is_oracle()); + let is_oracle_call = function_attributes.function().is_some_and(|func| func.is_oracle()); if is_oracle_call { Some(ResolverError::UnconstrainedOracleReturnToConstrained { location }) } else { @@ -346,7 +346,7 @@ fn can_return_without_recursing_match( HirMatch::Guard { cond: _, body, otherwise } => check(*body) && check_match(otherwise), HirMatch::Switch(_, cases, otherwise) => { cases.iter().all(|case| check_match(&case.body)) - && otherwise.as_ref().map_or(true, |case| check_match(case)) + && otherwise.as_ref().is_none_or(|case| check_match(case)) } } } diff --git a/compiler/noirc_frontend/src/elaborator/mod.rs b/compiler/noirc_frontend/src/elaborator/mod.rs index 659bc72982c..3a6a2c8eaea 100644 --- a/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/compiler/noirc_frontend/src/elaborator/mod.rs @@ -2151,7 +2151,7 @@ impl<'context> Elaborator<'context> { /// Defaults to `true` if the current function is unknown. fn in_constrained_function(&self) -> bool { !self.in_comptime_context() - && self.current_item.map_or(true, |id| match id { + && self.current_item.is_none_or(|id| match id { DependencyId::Function(id) => { !self.interner.function_modifiers(&id).is_unconstrained } diff --git a/compiler/noirc_frontend/src/elaborator/options.rs b/compiler/noirc_frontend/src/elaborator/options.rs index 285d1ddfe59..58bb5e73a61 100644 --- a/compiler/noirc_frontend/src/elaborator/options.rs +++ b/compiler/noirc_frontend/src/elaborator/options.rs @@ -50,7 +50,7 @@ pub(crate) type ElaboratorOptions<'a> = GenericOptions<'a, fm::FileId>; /// CLI options that need to be passed to the compiler frontend (the elaborator). 
pub type FrontendOptions<'a> = GenericOptions<'a, &'a str>; -impl<'a, T> GenericOptions<'a, T> { +impl GenericOptions<'_, T> { /// A sane default of frontend options for running tests pub fn test_default() -> GenericOptions<'static, T> { GenericOptions { diff --git a/compiler/noirc_frontend/src/elaborator/path_resolution.rs b/compiler/noirc_frontend/src/elaborator/path_resolution.rs index bbe90e8db24..a1d3eb8caca 100644 --- a/compiler/noirc_frontend/src/elaborator/path_resolution.rs +++ b/compiler/noirc_frontend/src/elaborator/path_resolution.rs @@ -102,7 +102,7 @@ enum MethodLookupResult { FoundMultipleTraitMethods(Vec), } -impl<'context> Elaborator<'context> { +impl Elaborator<'_> { pub(super) fn resolve_path_or_error( &mut self, path: Path, diff --git a/compiler/noirc_frontend/src/elaborator/patterns.rs b/compiler/noirc_frontend/src/elaborator/patterns.rs index 94ec95ba9bf..f480f339a5d 100644 --- a/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -22,7 +22,7 @@ use crate::{ use super::{path_resolution::PathResolutionItem, Elaborator, ResolverMeta}; -impl<'context> Elaborator<'context> { +impl Elaborator<'_> { pub(super) fn elaborate_pattern( &mut self, pattern: Pattern, @@ -834,13 +834,13 @@ impl<'context> Elaborator<'context> { // Try to look it up as a global, but still issue the first error if we fail Some(Err(error)) => match self.lookup_global(path) { Ok((id, item)) => { - return ((HirIdent::non_trait_method(id, location), 0), Some(item)) + return ((HirIdent::non_trait_method(id, location), 0), Some(item)); } Err(_) => error, }, None => match self.lookup_global(path) { Ok((id, item)) => { - return ((HirIdent::non_trait_method(id, location), 0), Some(item)) + return ((HirIdent::non_trait_method(id, location), 0), Some(item)); } Err(error) => error, }, diff --git a/compiler/noirc_frontend/src/elaborator/scope.rs b/compiler/noirc_frontend/src/elaborator/scope.rs index b6cc789f38f..6e2649d6441 100644 --- a/compiler/noirc_frontend/src/elaborator/scope.rs +++ b/compiler/noirc_frontend/src/elaborator/scope.rs @@ -22,7 +22,7 @@ use super::{Elaborator, ResolverMeta}; type Scope = GenericScope; type ScopeTree = GenericScopeTree; -impl<'context> Elaborator<'context> { +impl Elaborator<'_> { pub fn module_id(&self) -> ModuleId { assert_ne!(self.local_module, LocalModuleId::dummy_id(), "local_module is unset"); ModuleId { krate: self.crate_id, local_id: self.local_module } @@ -186,7 +186,7 @@ impl<'context> Elaborator<'context> { /// This will also instantiate any struct types found. 
pub(super) fn lookup_type_or_error(&mut self, path: Path) -> Option { let ident = path.as_ident(); - if ident.map_or(false, |i| i == SELF_TYPE_NAME) { + if ident.is_some_and(|i| i == SELF_TYPE_NAME) { if let Some(typ) = &self.self_type { return Some(typ.clone()); } diff --git a/compiler/noirc_frontend/src/elaborator/statements.rs b/compiler/noirc_frontend/src/elaborator/statements.rs index bd41cad391e..2f287394c20 100644 --- a/compiler/noirc_frontend/src/elaborator/statements.rs +++ b/compiler/noirc_frontend/src/elaborator/statements.rs @@ -22,7 +22,7 @@ use crate::{ use super::{lints, Elaborator, Loop}; -impl<'context> Elaborator<'context> { +impl Elaborator<'_> { fn elaborate_statement_value(&mut self, statement: Statement) -> (HirStatement, Type) { self.elaborate_statement_value_with_target_type(statement, None) } diff --git a/compiler/noirc_frontend/src/elaborator/trait_impls.rs b/compiler/noirc_frontend/src/elaborator/trait_impls.rs index 3ff757246f6..392b5e0ec46 100644 --- a/compiler/noirc_frontend/src/elaborator/trait_impls.rs +++ b/compiler/noirc_frontend/src/elaborator/trait_impls.rs @@ -19,7 +19,7 @@ use rustc_hash::FxHashSet as HashSet; use super::Elaborator; -impl<'context> Elaborator<'context> { +impl Elaborator<'_> { pub(super) fn collect_trait_impl_methods( &mut self, trait_id: TraitId, diff --git a/compiler/noirc_frontend/src/elaborator/traits.rs b/compiler/noirc_frontend/src/elaborator/traits.rs index bfd46c9fa8a..3b0666409da 100644 --- a/compiler/noirc_frontend/src/elaborator/traits.rs +++ b/compiler/noirc_frontend/src/elaborator/traits.rs @@ -20,7 +20,7 @@ use crate::{ use super::Elaborator; -impl<'context> Elaborator<'context> { +impl Elaborator<'_> { pub fn collect_traits(&mut self, traits: &mut BTreeMap) { for (trait_id, unresolved_trait) in traits { self.local_module = unresolved_trait.module_id; diff --git a/compiler/noirc_frontend/src/elaborator/types.rs b/compiler/noirc_frontend/src/elaborator/types.rs index d9010b03f43..62e8c398856 100644 --- a/compiler/noirc_frontend/src/elaborator/types.rs +++ b/compiler/noirc_frontend/src/elaborator/types.rs @@ -48,7 +48,7 @@ pub(super) struct TraitPathResolution { pub(super) errors: Vec, } -impl<'context> Elaborator<'context> { +impl Elaborator<'_> { /// Translates an UnresolvedType to a Type with a `TypeKind::Normal` pub(crate) fn resolve_type(&mut self, typ: UnresolvedType) -> Type { let location = typ.location; @@ -1008,7 +1008,9 @@ impl<'context> Elaborator<'context> { if from_is_polymorphic && from_value > to_maximum_size { let from = from.clone(); let to = to.clone(); - let reason = format!("casting untyped value ({from_value}) to a type with a maximum size ({to_maximum_size}) that's smaller than it"); + let reason = format!( + "casting untyped value ({from_value}) to a type with a maximum size ({to_maximum_size}) that's smaller than it" + ); // we warn that the 'to' type is too small for the value self.push_err(TypeCheckError::DownsizingCast { from, to, location, reason }); } diff --git a/compiler/noirc_frontend/src/elaborator/unquote.rs b/compiler/noirc_frontend/src/elaborator/unquote.rs index 8db9baaf575..56f9a694c9b 100644 --- a/compiler/noirc_frontend/src/elaborator/unquote.rs +++ b/compiler/noirc_frontend/src/elaborator/unquote.rs @@ -5,7 +5,7 @@ use crate::{ use super::Elaborator; -impl<'a> Elaborator<'a> { +impl Elaborator<'_> { /// Go through the given tokens looking for a '$' token followed by a variable to unquote. 
/// Each time these two tokens are found, they are replaced by a new UnquoteMarker token /// containing the ExprId of the resolved variable to unquote. diff --git a/compiler/noirc_frontend/src/hir/comptime/display.rs b/compiler/noirc_frontend/src/hir/comptime/display.rs index 338bb1259cf..de16415520b 100644 --- a/compiler/noirc_frontend/src/hir/comptime/display.rs +++ b/compiler/noirc_frontend/src/hir/comptime/display.rs @@ -49,7 +49,7 @@ struct TokensPrettyPrinter<'tokens, 'interner> { indent: usize, } -impl<'tokens, 'interner> Display for TokensPrettyPrinter<'tokens, 'interner> { +impl Display for TokensPrettyPrinter<'_, '_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut token_printer = TokenPrettyPrinter::new(self.interner, self.indent); for token in self.tokens { @@ -327,7 +327,7 @@ pub struct ValuePrinter<'value, 'interner> { interner: &'interner NodeInterner, } -impl<'value, 'interner> Display for ValuePrinter<'value, 'interner> { +impl Display for ValuePrinter<'_, '_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.value { Value::Unit => write!(f, "()"), @@ -494,7 +494,7 @@ pub struct TokenPrinter<'token, 'interner> { interner: &'interner NodeInterner, } -impl<'token, 'interner> Display for TokenPrinter<'token, 'interner> { +impl Display for TokenPrinter<'_, '_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.token { Token::QuotedType(id) => { diff --git a/compiler/noirc_frontend/src/hir/comptime/errors.rs b/compiler/noirc_frontend/src/hir/comptime/errors.rs index a5b1cbbbf2c..8fff67ad835 100644 --- a/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -462,7 +462,9 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { InterpreterError::NonIntegerArrayLength { typ, err, location } => { let msg = format!("Non-integer array length: `{typ}`"); let secondary = if let Some(err) = err { - format!("Array lengths must be integers, but evaluating `{typ}` resulted in `{err}`") + format!( + "Array lengths must be integers, but evaluating `{typ}` resulted in `{err}`" + ) } else { "Array lengths must be integers".to_string() }; diff --git a/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs b/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs index ac9da429491..dcc938faf2a 100644 --- a/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs +++ b/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -542,7 +542,9 @@ impl HirArrayLiteral { let expr_kind = ExpressionKind::Literal(literal); Box::new(Expression::new(expr_kind, location)) } - other => panic!("Cannot convert non-constant type for repeated array literal from Hir -> Ast: {other:?}"), + other => panic!( + "Cannot convert non-constant type for repeated array literal from Hir -> Ast: {other:?}" + ), }; ArrayLiteral::Repeated { repeated_element, length } } diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 49c94f7bd25..9a5f31a90fe 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -49,7 +49,7 @@ use super::Interpreter; pub(crate) mod builtin_helpers; -impl<'local, 'context> Interpreter<'local, 'context> { +impl Interpreter<'_, '_> { pub(super) fn call_builtin( &mut self, name: &str, @@ -561,7 +561,10 @@ fn struct_def_fields( if actual != 
expected { let s = if expected == 1 { "" } else { "s" }; let was_were = if actual == 1 { "was" } else { "were" }; - let message = Some(format!("`StructDefinition::fields` expected {expected} generic{s} for `{}` but {actual} {was_were} given", struct_def.name)); + let message = Some(format!( + "`StructDefinition::fields` expected {expected} generic{s} for `{}` but {actual} {was_were} given", + struct_def.name + )); let location = args_location; let call_stack = call_stack.clone(); return Err(InterpreterError::FailingConstraint { message, location, call_stack }); @@ -3023,10 +3026,10 @@ fn derive_generators( let y_field_name: Rc = Rc::new("y".to_owned()); let is_infinite_field_name: Rc = Rc::new("is_infinite".to_owned()); let mut results = Vector::new(); - for gen in generators { - let x_big: BigUint = gen.x.into(); + for generator in generators { + let x_big: BigUint = generator.x.into(); let x = FieldElement::from_be_bytes_reduce(&x_big.to_bytes_be()); - let y_big: BigUint = gen.y.into(); + let y_big: BigUint = generator.y.into(); let y = FieldElement::from_be_bytes_reduce(&y_big.to_bytes_be()); let mut embedded_curve_point_fields = HashMap::default(); embedded_curve_point_fields.insert(x_field_name.clone(), Value::Field(x)); diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs index e880dd4b1c8..5c1542c4045 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs @@ -25,7 +25,7 @@ use super::{ Interpreter, }; -impl<'local, 'context> Interpreter<'local, 'context> { +impl Interpreter<'_, '_> { pub(super) fn call_foreign( &mut self, name: &str, @@ -234,7 +234,7 @@ fn blake_hash( /// signature: [u8; 64], /// message_hash: [u8; N], /// ) -> bool - +/// /// pub fn verify_signature_slice( /// public_key_x: [u8; 32], /// public_key_y: [u8; 32], diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs index 4ee935ff49e..fc4daa22edb 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs @@ -7,7 +7,7 @@ use crate::{ use super::Interpreter; -impl<'local, 'interner> Interpreter<'local, 'interner> { +impl Interpreter<'_, '_> { /// Evaluates any expressions within UnquoteMarkers in the given token list /// and replaces the expression held by the marker with the evaluated value /// in expression form. diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index ec52347f882..83382e0bb6c 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -116,7 +116,7 @@ pub fn collect_defs( errors } -impl<'a> ModCollector<'a> { +impl ModCollector<'_> { fn collect_attributes( &mut self, attributes: Vec, diff --git a/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/compiler/noirc_frontend/src/hir/def_collector/errors.rs index daf30d93ad8..4e764daabfe 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -126,7 +126,9 @@ impl<'a> From<&'a UnsupportedNumericGenericType> for Diagnostic { let typ = &error.typ; Diagnostic::simple_error( - format!("{name} has a type of {typ}. 
The only supported numeric generic types are `u1`, `u8`, `u16`, and `u32`."), + format!( + "{name} has a type of {typ}. The only supported numeric generic types are `u1`, `u8`, `u16`, and `u32`." + ), "Unsupported numeric generic type".to_string(), error.ident.0.location(), ) diff --git a/compiler/noirc_frontend/src/hir/resolution/errors.rs b/compiler/noirc_frontend/src/hir/resolution/errors.rs index ea77d4dfdae..35731e6927e 100644 --- a/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -86,7 +86,9 @@ pub enum ResolverError { NestedSlices { location: Location }, #[error("#[abi(tag)] attribute is only allowed in contracts")] AbiAttributeOutsideContract { location: Location }, - #[error("Usage of the `#[foreign]` or `#[builtin]` function attributes are not allowed outside of the Noir standard library")] + #[error( + "Usage of the `#[foreign]` or `#[builtin]` function attributes are not allowed outside of the Noir standard library" + )] LowLevelFunctionOutsideOfStdlib { ident: Ident }, #[error( "Usage of the `#[oracle]` function attribute is only valid on unconstrained functions" diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index a6c9ee0cabb..ceb1dee9cb3 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -49,7 +49,9 @@ pub enum PathResolutionError { TurbofishNotAllowedOnItem { item: String, location: Location }, #[error("{ident} is a {kind}, not a module")] NotAModule { ident: Ident, kind: &'static str }, - #[error("trait `{trait_name}` which provides `{ident}` is implemented but not in scope, please import it")] + #[error( + "trait `{trait_name}` which provides `{ident}` is implemented but not in scope, please import it" + )] TraitMethodNotInScope { ident: Ident, trait_name: String }, #[error("Could not resolve '{ident}' in path")] UnresolvedWithPossibleTraitsToImport { ident: Ident, traits: Vec }, @@ -116,7 +118,10 @@ impl<'a> From<&'a PathResolutionError> for CustomDiagnostic { let traits = vecmap(traits, |trait_name| format!("`{}`", trait_name)); CustomDiagnostic::simple_error( error.to_string(), - format!("The following traits which provide `{ident}` are implemented but not in scope: {}", traits.join(", ")), + format!( + "The following traits which provide `{ident}` are implemented but not in scope: {}", + traits.join(", ") + ), ident.location(), ) } @@ -179,7 +184,7 @@ struct PathResolutionTargetResolver<'def_maps, 'references_tracker> { references_tracker: Option>, } -impl<'def_maps, 'references_tracker> PathResolutionTargetResolver<'def_maps, 'references_tracker> { +impl PathResolutionTargetResolver<'_, '_> { fn resolve(&mut self, path: Path) -> Result<(Path, ModuleId), PathResolutionError> { match path.kind { PathKind::Crate => self.resolve_crate_path(path), diff --git a/compiler/noirc_frontend/src/hir/resolution/visibility.rs b/compiler/noirc_frontend/src/hir/resolution/visibility.rs index c592175ffcb..1ae0037bc7d 100644 --- a/compiler/noirc_frontend/src/hir/resolution/visibility.rs +++ b/compiler/noirc_frontend/src/hir/resolution/visibility.rs @@ -61,7 +61,7 @@ pub(crate) fn module_descendent_of_target( def_map.modules[current.0] .parent - .map_or(false, |parent| module_descendent_of_target(def_map, target, parent)) + .is_some_and(|parent| module_descendent_of_target(def_map, target, parent)) } /// Returns true if `target` is a struct and its parent is `current`. 
diff --git a/compiler/noirc_frontend/src/hir/type_check/errors.rs b/compiler/noirc_frontend/src/hir/type_check/errors.rs
index 5f348e32365..1fdc0b30f10 100644
--- a/compiler/noirc_frontend/src/hir/type_check/errors.rs
+++ b/compiler/noirc_frontend/src/hir/type_check/errors.rs
@@ -116,11 +116,15 @@ pub enum TypeCheckError {
     InvalidInfixOp { kind: &'static str, location: Location },
     #[error("{kind} cannot be used in a unary operation")]
     InvalidUnaryOp { kind: String, location: Location },
-    #[error("Bitwise operations are invalid on Field types. Try casting the operands to a sized integer type first.")]
+    #[error(
+        "Bitwise operations are invalid on Field types. Try casting the operands to a sized integer type first."
+    )]
     FieldBitwiseOp { location: Location },
     #[error("Integer cannot be used with type {typ}")]
     IntegerTypeMismatch { typ: Type, location: Location },
-    #[error("Cannot use an integer and a Field in a binary operation, try converting the Field into an integer first")]
+    #[error(
+        "Cannot use an integer and a Field in a binary operation, try converting the Field into an integer first"
+    )]
     IntegerAndFieldBinaryOperation { location: Location },
     #[error("Cannot do modulo on Fields, try casting to an integer first")]
     FieldModulo { location: Location },
@@ -128,9 +132,13 @@ pub enum TypeCheckError {
     FieldNot { location: Location },
     #[error("Fields cannot be compared, try casting to an integer first")]
     FieldComparison { location: Location },
-    #[error("The bit count in a bit-shift operation must fit in a u8, try casting the right hand side into a u8 first")]
+    #[error(
+        "The bit count in a bit-shift operation must fit in a u8, try casting the right hand side into a u8 first"
+    )]
     InvalidShiftSize { location: Location },
-    #[error("The number of bits to use for this bitwise operation is ambiguous.
Either the operand's type or return type should be specified" + )] AmbiguousBitWidth { location: Location }, #[error("Error with additional context")] Context { err: Box, ctx: &'static str }, @@ -165,7 +173,9 @@ pub enum TypeCheckError { }, #[error("No matching impl found")] NoMatchingImplFound(NoMatchingImplFoundError), - #[error("Constraint for `{typ}: {trait_name}` is not needed, another matching impl is already in scope")] + #[error( + "Constraint for `{typ}: {trait_name}` is not needed, another matching impl is already in scope" + )] UnneededTraitConstraint { trait_name: String, typ: Type, location: Location }, #[error( "Expected {expected_count} generic(s) from this function, but {actual_count} were provided" @@ -185,7 +195,9 @@ pub enum TypeCheckError { UnconstrainedReferenceToConstrained { location: Location }, #[error("Slices cannot be returned from an unconstrained runtime to a constrained runtime")] UnconstrainedSliceReturnToConstrained { location: Location }, - #[error("Call to unconstrained function is unsafe and must be in an unconstrained function or unsafe block")] + #[error( + "Call to unconstrained function is unsafe and must be in an unconstrained function or unsafe block" + )] Unsafe { location: Location }, #[error("Converting an unconstrained fn to a non-unconstrained fn is unsafe")] UnsafeFn { location: Location }, diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 56c1e014c13..80a1aa5c5b1 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -2294,7 +2294,11 @@ impl Type { ) -> (Type, TypeBindings) { match self { Type::Forall(typevars, typ) => { - assert_eq!(types.len() + implicit_generic_count, typevars.len(), "Turbofish operator used with incorrect generic count which was not caught by name resolution"); + assert_eq!( + types.len() + implicit_generic_count, + typevars.len(), + "Turbofish operator used with incorrect generic count which was not caught by name resolution" + ); let bindings = (0..implicit_generic_count).map(|_| interner.next_type_variable()).chain(types); diff --git a/compiler/noirc_frontend/src/hir_def/types/arithmetic.rs b/compiler/noirc_frontend/src/hir_def/types/arithmetic.rs index ad35e1a11bf..83618bf3707 100644 --- a/compiler/noirc_frontend/src/hir_def/types/arithmetic.rs +++ b/compiler/noirc_frontend/src/hir_def/types/arithmetic.rs @@ -600,7 +600,7 @@ mod proptests { #[test] // Expect cases that don't resolve to constants, e.g. 
see
         // `arithmetic_generics_checked_cast_indirect_zeros`
-        #[should_panic(expected = "matches!(infix, Type :: Constant(..))")]
+        #[should_panic(expected = "matches!(infix, Type::Constant(..))")]
         fn instantiate_before_or_after_canonicalize(infix_type_bindings in arbitrary_infix_expr_with_bindings(10)) {
             let (infix, typ, bindings) = infix_type_bindings;

diff --git a/compiler/noirc_frontend/src/lexer/lexer.rs b/compiler/noirc_frontend/src/lexer/lexer.rs
index 0407a6eba95..0777c235ad9 100644
--- a/compiler/noirc_frontend/src/lexer/lexer.rs
+++ b/compiler/noirc_frontend/src/lexer/lexer.rs
@@ -443,7 +443,7 @@ impl<'a> Lexer<'a> {
                     return Err(LexerErrorKind::InvalidIntegerLiteral {
                         location: self.location(Span::inclusive(start, end)),
                         found: integer_str,
-                    })
+                    });
                 }
             };

@@ -879,7 +879,7 @@ impl<'a> Lexer<'a> {
     }
 }

-impl<'a> Iterator for Lexer<'a> {
+impl Iterator for Lexer<'_> {
     type Item = LocatedTokenResult;

     fn next(&mut self) -> Option<Self::Item> {
@@ -1604,7 +1604,9 @@ mod tests {
         tokens.pop();
         match tokens.pop().unwrap() {
             Token::Quote(stream) => assert_eq!(stream.0.len(), expected_stream_length),
-            other => panic!("test_quote test failure! Expected a single TokenStream token, got {other} for input `{source}`")
+            other => panic!(
+                "test_quote test failure! Expected a single TokenStream token, got {other} for input `{source}`"
+            ),
         }
     }
 }
diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs
index 6b8671dd8bf..0b4a4bad278 100644
--- a/compiler/noirc_frontend/src/lexer/token.rs
+++ b/compiler/noirc_frontend/src/lexer/token.rs
@@ -800,11 +800,11 @@ impl Attributes {
     }

     pub fn is_foldable(&self) -> bool {
-        self.function().map_or(false, |func_attribute| func_attribute.is_foldable())
+        self.function().is_some_and(|func_attribute| func_attribute.is_foldable())
     }

     pub fn is_no_predicates(&self) -> bool {
-        self.function().map_or(false, |func_attribute| func_attribute.is_no_predicates())
+        self.function().is_some_and(|func_attribute| func_attribute.is_no_predicates())
     }

     pub fn has_varargs(&self) -> bool {
diff --git a/compiler/noirc_frontend/src/lib.rs b/compiler/noirc_frontend/src/lib.rs
index 1c21870eb10..a6f57b84d6f 100644
--- a/compiler/noirc_frontend/src/lib.rs
+++ b/compiler/noirc_frontend/src/lib.rs
@@ -9,6 +9,8 @@
 #![warn(unused_crate_dependencies, unused_extern_crates)]
 #![warn(unreachable_pub)]
 #![warn(clippy::semicolon_if_nothing_returned)]
+// Temporary allows.
+#![allow(clippy::mutable_key_type, clippy::result_large_err)]

 pub mod ast;
 pub mod debug;
diff --git a/compiler/noirc_frontend/src/monomorphization/debug.rs b/compiler/noirc_frontend/src/monomorphization/debug.rs
index 44299d0e861..df2a0ada959 100644
--- a/compiler/noirc_frontend/src/monomorphization/debug.rs
+++ b/compiler/noirc_frontend/src/monomorphization/debug.rs
@@ -29,7 +29,7 @@ impl From for SourceFieldId {
     }
 }

-impl<'interner> Monomorphizer<'interner> {
+impl Monomorphizer<'_> {
     /// Patch instrumentation calls inserted for debugging. This will record
     /// tracked variables and their types, and assign them an ID to use at
     /// runtime. This ID is different from the source ID assigned at
diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs
index cf6dc8a5d57..91132fb13f8 100644
--- a/compiler/noirc_frontend/src/monomorphization/mod.rs
+++ b/compiler/noirc_frontend/src/monomorphization/mod.rs
@@ -603,7 +603,9 @@ impl<'interner> Monomorphizer<'interner> {
             HirExpression::Lambda(lambda) => self.lambda(lambda, expr)?,

             HirExpression::MethodCall(hir_method_call) => {
-                unreachable!("Encountered HirExpression::MethodCall during monomorphization {hir_method_call:?}")
+                unreachable!(
+                    "Encountered HirExpression::MethodCall during monomorphization {hir_method_call:?}"
+                )
             }
             HirExpression::Error => unreachable!("Encountered Error node during monomorphization"),
             HirExpression::Quote(_) => unreachable!("quote expression remaining in runtime code"),
@@ -1081,7 +1083,9 @@ impl<'interner> Monomorphizer<'interner> {
                     .map_err(MonomorphizationError::InterpreterError)?;
                 (expr, is_closure)
             } else {
-                unreachable!("All global values should be resolved at compile time and before monomorphization");
+                unreachable!(
+                    "All global values should be resolved at compile time and before monomorphization"
+                );
             };

             let expr = self.expr(expr)?;
@@ -2076,7 +2080,9 @@ impl<'interner> Monomorphizer<'interner> {
                 let zeroed_tuple = self.zeroed_value_of_type(fields, location);
                 let fields_len = match &zeroed_tuple {
                     ast::Expression::Tuple(fields) => fields.len() as u64,
-                    _ => unreachable!("ICE: format string fields should be structured in a tuple, but got a {zeroed_tuple}"),
+                    _ => unreachable!(
+                        "ICE: format string fields should be structured in a tuple, but got a {zeroed_tuple}"
+                    ),
                 };
                 ast::Expression::Literal(ast::Literal::FmtStr(
                     vec![FmtStrFragment::String("\0".repeat(*length as usize))],
diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs
index dabb940996a..250d3f752ac 100644
--- a/compiler/noirc_frontend/src/node_interner.rs
+++ b/compiler/noirc_frontend/src/node_interner.rs
@@ -1158,7 +1158,9 @@ impl NodeInterner {
                 HirStatement::Let(let_stmt) => Some(let_stmt.clone()),
                 HirStatement::Error => None,
                 other => {
-                    panic!("ice: all globals should correspond to a let statement in the interner: {other:?}")
+                    panic!(
+                        "ice: all globals should correspond to a let statement in the interner: {other:?}"
+                    )
                 }
             },
             _ => panic!("ice: all globals should correspond to a statement in the interner"),
diff --git a/compiler/noirc_frontend/src/parser/errors.rs b/compiler/noirc_frontend/src/parser/errors.rs
index 3e25c4cf077..76e2958f668 100644
--- a/compiler/noirc_frontend/src/parser/errors.rs
+++ b/compiler/noirc_frontend/src/parser/errors.rs
@@ -62,7 +62,9 @@ pub enum ParserErrorReason {
     MissingSeparatingSemi,
     #[error("constrain keyword is deprecated")]
     ConstrainDeprecated,
-    #[error("Invalid type expression: '{0}'. Only unsigned integer constants up to `u32`, globals, generics, +, -, *, /, and % may be used in this context.")]
+    #[error(
+        "Invalid type expression: '{0}'. Only unsigned integer constants up to `u32`, globals, generics, +, -, *, /, and % may be used in this context."
+ )] InvalidTypeExpression(Expression), #[error("Early 'return' is unsupported")] EarlyReturn, diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index ccd064ffbb3..fee970dc7b0 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -65,7 +65,7 @@ enum TokenStream<'a> { Tokens(Tokens), } -impl<'a> TokenStream<'a> { +impl TokenStream<'_> { fn next(&mut self) -> Option { match self { TokenStream::Lexer(lexer) => lexer.next(), diff --git a/compiler/noirc_frontend/src/parser/parser/arguments.rs b/compiler/noirc_frontend/src/parser/parser/arguments.rs index 380f42809a6..808a516b862 100644 --- a/compiler/noirc_frontend/src/parser/parser/arguments.rs +++ b/compiler/noirc_frontend/src/parser/parser/arguments.rs @@ -7,7 +7,7 @@ pub(crate) struct CallArguments { pub(crate) is_macro_call: bool, } -impl<'a> Parser<'a> { +impl Parser<'_> { /// Arguments = '(' ArgumentsList? ')' /// /// ArgumentsList = Expression ( ',' Expression )? ','? diff --git a/compiler/noirc_frontend/src/parser/parser/attributes.rs b/compiler/noirc_frontend/src/parser/parser/attributes.rs index b5dd6c81e71..0d10744c441 100644 --- a/compiler/noirc_frontend/src/parser/parser/attributes.rs +++ b/compiler/noirc_frontend/src/parser/parser/attributes.rs @@ -10,7 +10,7 @@ use crate::token::{CustomAttribute, SecondaryAttribute}; use super::parse_many::without_separator; use super::Parser; -impl<'a> Parser<'a> { +impl Parser<'_> { /// InnerAttribute = '#![' SecondaryAttribute ']' pub(super) fn parse_inner_attribute(&mut self) -> Option { let start_location = self.current_token_location; diff --git a/compiler/noirc_frontend/src/parser/parser/doc_comments.rs b/compiler/noirc_frontend/src/parser/parser/doc_comments.rs index 5dd933503f3..14b79f158bf 100644 --- a/compiler/noirc_frontend/src/parser/parser/doc_comments.rs +++ b/compiler/noirc_frontend/src/parser/parser/doc_comments.rs @@ -5,7 +5,7 @@ use crate::{ use super::{parse_many::without_separator, Parser}; -impl<'a> Parser<'a> { +impl Parser<'_> { /// InnerDocComments = inner_doc_comment* pub(super) fn parse_inner_doc_comments(&mut self) -> Vec { self.parse_many("inner doc comments", without_separator(), Self::parse_inner_doc_comment) diff --git a/compiler/noirc_frontend/src/parser/parser/enums.rs b/compiler/noirc_frontend/src/parser/parser/enums.rs index d051e392caa..857238c4a42 100644 --- a/compiler/noirc_frontend/src/parser/parser/enums.rs +++ b/compiler/noirc_frontend/src/parser/parser/enums.rs @@ -11,7 +11,7 @@ use super::{ Parser, }; -impl<'a> Parser<'a> { +impl Parser<'_> { /// Enum = 'enum' identifier Generics '{' EnumVariant* '}' /// /// EnumField = OuterDocComments identifier ':' Type diff --git a/compiler/noirc_frontend/src/parser/parser/expression.rs b/compiler/noirc_frontend/src/parser/parser/expression.rs index c61b821a062..ee2476a71e8 100644 --- a/compiler/noirc_frontend/src/parser/parser/expression.rs +++ b/compiler/noirc_frontend/src/parser/parser/expression.rs @@ -20,7 +20,7 @@ use super::{ Parser, }; -impl<'a> Parser<'a> { +impl Parser<'_> { pub(crate) fn parse_expression_or_error(&mut self) -> Expression { self.parse_expression_or_error_impl(true) // allow constructors } @@ -1637,14 +1637,22 @@ mod tests { let multiply_or_divide_or_modulo = "1 * 2 / 3 % 4"; let expected_multiply_or_divide_or_modulo = "(((1 * 2) / 3) % 4)"; - let add_or_subtract = format!("{multiply_or_divide_or_modulo} + {multiply_or_divide_or_modulo} - {multiply_or_divide_or_modulo}"); - let 
expected_add_or_subtract = format!("(({expected_multiply_or_divide_or_modulo} + {expected_multiply_or_divide_or_modulo}) - {expected_multiply_or_divide_or_modulo})"); + let add_or_subtract = format!( + "{multiply_or_divide_or_modulo} + {multiply_or_divide_or_modulo} - {multiply_or_divide_or_modulo}" + ); + let expected_add_or_subtract = format!( + "(({expected_multiply_or_divide_or_modulo} + {expected_multiply_or_divide_or_modulo}) - {expected_multiply_or_divide_or_modulo})" + ); let shift = format!("{add_or_subtract} << {add_or_subtract} >> {add_or_subtract}"); - let expected_shift = format!("(({expected_add_or_subtract} << {expected_add_or_subtract}) >> {expected_add_or_subtract})"); + let expected_shift = format!( + "(({expected_add_or_subtract} << {expected_add_or_subtract}) >> {expected_add_or_subtract})" + ); let less_or_greater = format!("{shift} < {shift} > {shift} <= {shift} >= {shift}"); - let expected_less_or_greater = format!("(((({expected_shift} < {expected_shift}) > {expected_shift}) <= {expected_shift}) >= {expected_shift})"); + let expected_less_or_greater = format!( + "(((({expected_shift} < {expected_shift}) > {expected_shift}) <= {expected_shift}) >= {expected_shift})" + ); let xor = format!("{less_or_greater} ^ {less_or_greater}"); let expected_xor = format!("({expected_less_or_greater} ^ {expected_less_or_greater})"); diff --git a/compiler/noirc_frontend/src/parser/parser/function.rs b/compiler/noirc_frontend/src/parser/parser/function.rs index 8e98adc0a7b..1052f32a2ae 100644 --- a/compiler/noirc_frontend/src/parser/parser/function.rs +++ b/compiler/noirc_frontend/src/parser/parser/function.rs @@ -30,7 +30,7 @@ pub(crate) struct FunctionDefinitionWithOptionalBody { pub(crate) return_visibility: Visibility, } -impl<'a> Parser<'a> { +impl Parser<'_> { /// Function = 'fn' identifier Generics FunctionParameters ( '->' Visibility Type )? WhereClause ( Block | ';' ) pub(crate) fn parse_function( &mut self, diff --git a/compiler/noirc_frontend/src/parser/parser/generics.rs b/compiler/noirc_frontend/src/parser/parser/generics.rs index 400458352f8..15f27d35a50 100644 --- a/compiler/noirc_frontend/src/parser/parser/generics.rs +++ b/compiler/noirc_frontend/src/parser/parser/generics.rs @@ -9,7 +9,7 @@ use crate::{ use super::{parse_many::separated_by_comma, Parser}; -impl<'a> Parser<'a> { +impl Parser<'_> { /// Generics = ( '<' GenericsList? '>' )? /// /// GenericsList = Generic ( ',' Generic )* ','? 
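
The parser and formatter hunks above (and many below) repeat one mechanical change: `impl<'a> Parser<'a> { ... }` becomes `impl Parser<'_> { ... }`. The named lifetime is never referred to inside those impl bodies, and the anonymous form is what the stricter `clippy::needless_lifetimes` lint shipped with the newer toolchain suggests. A minimal sketch of the pattern, using a hypothetical `Cursor` type rather than anything from this workspace:

```rust
// Hypothetical type used only to illustrate the `impl<'a> T<'a>` -> `impl T<'_>` rewrite.
struct Cursor<'a> {
    source: &'a str,
    offset: usize,
}

// Before: `impl<'a> Cursor<'a> { ... }` declares a lifetime the body never names.
// After: the anonymous lifetime expresses the same impl without the unused parameter.
impl Cursor<'_> {
    fn remaining(&self) -> usize {
        self.source.len() - self.offset
    }
}

fn main() {
    let cursor = Cursor { source: "fn main() {}", offset: 3 };
    assert_eq!(cursor.remaining(), 9);
}
```
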
diff --git a/compiler/noirc_frontend/src/parser/parser/global.rs b/compiler/noirc_frontend/src/parser/parser/global.rs index 96a76b74ef0..9f4e2a5f932 100644 --- a/compiler/noirc_frontend/src/parser/parser/global.rs +++ b/compiler/noirc_frontend/src/parser/parser/global.rs @@ -11,7 +11,7 @@ use crate::{ use super::Parser; -impl<'a> Parser<'a> { +impl Parser<'_> { /// Global = 'global' identifier OptionalTypeAnnotation '=' Expression ';' pub(crate) fn parse_global( &mut self, diff --git a/compiler/noirc_frontend/src/parser/parser/impls.rs b/compiler/noirc_frontend/src/parser/parser/impls.rs index 4a930d5025c..0bc8b8c042d 100644 --- a/compiler/noirc_frontend/src/parser/parser/impls.rs +++ b/compiler/noirc_frontend/src/parser/parser/impls.rs @@ -17,7 +17,7 @@ pub(crate) enum Impl { TraitImpl(NoirTraitImpl), } -impl<'a> Parser<'a> { +impl Parser<'_> { /// Impl /// = TypeImpl /// | TraitImpl diff --git a/compiler/noirc_frontend/src/parser/parser/item_visibility.rs b/compiler/noirc_frontend/src/parser/parser/item_visibility.rs index de70c95cf7f..b91cc397015 100644 --- a/compiler/noirc_frontend/src/parser/parser/item_visibility.rs +++ b/compiler/noirc_frontend/src/parser/parser/item_visibility.rs @@ -5,7 +5,7 @@ use crate::{ use super::Parser; -impl<'a> Parser<'a> { +impl Parser<'_> { /// ItemVisibility /// = 'pub' // ItemVisibility::Public /// | 'pub' '(' 'crate' ')' // ItemVisibility::PublicCrate diff --git a/compiler/noirc_frontend/src/parser/parser/lambda.rs b/compiler/noirc_frontend/src/parser/parser/lambda.rs index a6eeb428621..a40f46ecbb4 100644 --- a/compiler/noirc_frontend/src/parser/parser/lambda.rs +++ b/compiler/noirc_frontend/src/parser/parser/lambda.rs @@ -6,7 +6,7 @@ use crate::{ use super::{parse_many::separated_by_comma, Parser}; -impl<'a> Parser<'a> { +impl Parser<'_> { /// Lambda = '|' LambdaParameters? '|' ( '->' Type )? Expression /// /// LambdaParameters = LambdaParameter ( ',' LambdaParameter )? ','? diff --git a/compiler/noirc_frontend/src/parser/parser/modifiers.rs b/compiler/noirc_frontend/src/parser/parser/modifiers.rs index 23d8623b679..896a27c0416 100644 --- a/compiler/noirc_frontend/src/parser/parser/modifiers.rs +++ b/compiler/noirc_frontend/src/parser/parser/modifiers.rs @@ -13,7 +13,7 @@ pub(crate) struct Modifiers { pub(crate) mutable: Option, } -impl<'a> Parser<'a> { +impl Parser<'_> { /// Modifiers = ItemVisibility 'unconstrained'? 'comptime'? 'mut'? /// /// NOTE: we also allow `unconstrained` before the visibility for backwards compatibility. 
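
A second recurring substitution, first visible in `token.rs` above, swaps `Option::map_or(false, f)` for `is_some_and(f)`; later hunks make the mirror-image swap of `map_or(true, f)` for `is_none_or(f)`. Both methods are stable on the 1.85.0 toolchain (`is_none_or` only landed in 1.82, which is presumably why the older spelling survived this long), and recent clippy nudges toward them. A self-contained illustration with invented helper names and an `.nr`-extension check standing in for the real call sites:

```rust
use std::path::Path;

// `is_noir_file` and `should_skip` are made-up names for this sketch; only the
// Option combinators mirror the patch.
fn is_noir_file(path: &Path) -> bool {
    // Old form: path.extension().map_or(false, |ext| ext == "nr")
    path.extension().is_some_and(|ext| ext == "nr")
}

fn should_skip(path: &Path) -> bool {
    // Old form: !path.extension().map_or(false, |ext| ext == "nr")
    path.extension().is_none_or(|ext| ext != "nr")
}

fn main() {
    assert!(is_noir_file(Path::new("src/main.nr")));
    assert!(should_skip(Path::new("README.md")));
    assert!(should_skip(Path::new("Makefile"))); // no extension at all
}
```
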
diff --git a/compiler/noirc_frontend/src/parser/parser/module.rs b/compiler/noirc_frontend/src/parser/parser/module.rs index 6fbf3c14126..9546a4f8cfa 100644 --- a/compiler/noirc_frontend/src/parser/parser/module.rs +++ b/compiler/noirc_frontend/src/parser/parser/module.rs @@ -8,7 +8,7 @@ use crate::{ use super::Parser; -impl<'a> Parser<'a> { +impl Parser<'_> { /// ModOrContract /// = ( 'mod' | 'contract' ) identifier ( '{' Module '}' | ';' ) pub(super) fn parse_mod_or_contract( diff --git a/compiler/noirc_frontend/src/parser/parser/path.rs b/compiler/noirc_frontend/src/parser/parser/path.rs index b6262dc1352..b2d0c713cf3 100644 --- a/compiler/noirc_frontend/src/parser/parser/path.rs +++ b/compiler/noirc_frontend/src/parser/parser/path.rs @@ -9,7 +9,7 @@ use crate::{parser::labels::ParsingRuleLabel, token::TokenKind}; use super::Parser; -impl<'a> Parser<'a> { +impl Parser<'_> { #[cfg(test)] pub(crate) fn parse_path_or_error(&mut self) -> Path { if let Some(path) = self.parse_path() { diff --git a/compiler/noirc_frontend/src/parser/parser/pattern.rs b/compiler/noirc_frontend/src/parser/parser/pattern.rs index 15613bb8f99..578b238a1dd 100644 --- a/compiler/noirc_frontend/src/parser/parser/pattern.rs +++ b/compiler/noirc_frontend/src/parser/parser/pattern.rs @@ -22,7 +22,7 @@ pub(crate) struct SelfPattern { pub(crate) mutable: bool, } -impl<'a> Parser<'a> { +impl Parser<'_> { pub(crate) fn parse_pattern_or_error(&mut self) -> Pattern { if let Some(pattern) = self.parse_pattern() { return pattern; diff --git a/compiler/noirc_frontend/src/parser/parser/statement.rs b/compiler/noirc_frontend/src/parser/parser/statement.rs index c7e29c5dc61..f90173156e7 100644 --- a/compiler/noirc_frontend/src/parser/parser/statement.rs +++ b/compiler/noirc_frontend/src/parser/parser/statement.rs @@ -12,7 +12,7 @@ use crate::{ use super::Parser; -impl<'a> Parser<'a> { +impl Parser<'_> { pub(crate) fn parse_statement_or_error(&mut self) -> Statement { if let Some((statement, (_token, _span))) = self.parse_statement() { statement @@ -105,7 +105,7 @@ impl<'a> Parser<'a> { if let Some(token) = self.eat_kind(TokenKind::InternedStatement) { match token.into_token() { Token::InternedStatement(statement) => { - return Some(StatementKind::Interned(statement)) + return Some(StatementKind::Interned(statement)); } _ => unreachable!(), } diff --git a/compiler/noirc_frontend/src/parser/parser/statement_or_expression_or_lvalue.rs b/compiler/noirc_frontend/src/parser/parser/statement_or_expression_or_lvalue.rs index a6167a16538..0ac1b1972c4 100644 --- a/compiler/noirc_frontend/src/parser/parser/statement_or_expression_or_lvalue.rs +++ b/compiler/noirc_frontend/src/parser/parser/statement_or_expression_or_lvalue.rs @@ -12,7 +12,7 @@ pub enum StatementOrExpressionOrLValue { LValue(LValue), } -impl<'a> Parser<'a> { +impl Parser<'_> { /// Parses either a statement, an expression or an LValue. Returns `StatementKind::Error` /// if none can be parsed, recording an error if so. 
/// diff --git a/compiler/noirc_frontend/src/parser/parser/structs.rs b/compiler/noirc_frontend/src/parser/parser/structs.rs index 3eebf7ffd1e..00bab45e97c 100644 --- a/compiler/noirc_frontend/src/parser/parser/structs.rs +++ b/compiler/noirc_frontend/src/parser/parser/structs.rs @@ -8,7 +8,7 @@ use crate::{ use super::{parse_many::separated_by_comma_until_right_brace, Parser}; -impl<'a> Parser<'a> { +impl Parser<'_> { /// Struct = 'struct' identifier Generics '{' StructField* '}' /// /// StructField = OuterDocComments identifier ':' Type diff --git a/compiler/noirc_frontend/src/parser/parser/traits.rs b/compiler/noirc_frontend/src/parser/parser/traits.rs index 614a56e2ec4..80e336f4c81 100644 --- a/compiler/noirc_frontend/src/parser/parser/traits.rs +++ b/compiler/noirc_frontend/src/parser/parser/traits.rs @@ -15,7 +15,7 @@ use crate::{ use super::parse_many::without_separator; use super::Parser; -impl<'a> Parser<'a> { +impl Parser<'_> { /// Trait = 'trait' identifier Generics ( ':' TraitBounds )? WhereClause TraitBody /// | 'trait' identifier Generics '=' TraitBounds WhereClause ';' pub(crate) fn parse_trait( diff --git a/compiler/noirc_frontend/src/parser/parser/type_alias.rs b/compiler/noirc_frontend/src/parser/parser/type_alias.rs index 109b1f572ed..50e67ec6d53 100644 --- a/compiler/noirc_frontend/src/parser/parser/type_alias.rs +++ b/compiler/noirc_frontend/src/parser/parser/type_alias.rs @@ -7,7 +7,7 @@ use crate::{ use super::Parser; -impl<'a> Parser<'a> { +impl Parser<'_> { /// TypeAlias = 'type' identifier Generics '=' Type ';' pub(crate) fn parse_type_alias( &mut self, diff --git a/compiler/noirc_frontend/src/parser/parser/type_expression.rs b/compiler/noirc_frontend/src/parser/parser/type_expression.rs index 5f4fa41f2bb..1b4eaabaf35 100644 --- a/compiler/noirc_frontend/src/parser/parser/type_expression.rs +++ b/compiler/noirc_frontend/src/parser/parser/type_expression.rs @@ -10,7 +10,7 @@ use noirc_errors::Location; use super::{parse_many::separated_by_comma_until_right_paren, Parser}; -impl<'a> Parser<'a> { +impl Parser<'_> { /// TypeExpression= AddOrSubtractTypeExpression pub(crate) fn parse_type_expression( &mut self, diff --git a/compiler/noirc_frontend/src/parser/parser/types.rs b/compiler/noirc_frontend/src/parser/parser/types.rs index 14e7614f044..b8551fd5352 100644 --- a/compiler/noirc_frontend/src/parser/parser/types.rs +++ b/compiler/noirc_frontend/src/parser/parser/types.rs @@ -9,7 +9,7 @@ use crate::{ use super::{parse_many::separated_by_comma_until_right_paren, Parser}; -impl<'a> Parser<'a> { +impl Parser<'_> { pub(crate) fn parse_type_or_error(&mut self) -> UnresolvedType { if let Some(typ) = self.parse_type() { typ diff --git a/compiler/noirc_frontend/src/parser/parser/use_tree.rs b/compiler/noirc_frontend/src/parser/parser/use_tree.rs index f107387ad32..393af80dd29 100644 --- a/compiler/noirc_frontend/src/parser/parser/use_tree.rs +++ b/compiler/noirc_frontend/src/parser/parser/use_tree.rs @@ -8,7 +8,7 @@ use crate::{ use super::{parse_many::separated_by_comma_until_right_brace, Parser}; -impl<'a> Parser<'a> { +impl Parser<'_> { /// Use = 'use' PathKind PathNoTurbofish UseTree /// /// UseTree = PathNoTurbofish ( '::' '{' UseTreeList? '}' )? 
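
Much of the remaining churn in these frontend and tooling files is the formatter re-wrapping long string literals in `panic!`, `unreachable!`, `format!`, and `#[error(...)]` invocations onto their own lines once the call no longer fits the configured width; the re-wrapped message text itself is unchanged. A minimal sketch of the resulting attribute layout, using the same `thiserror`-style derive as the error enums in this patch but with an invented error type and message:

```rust
use thiserror::Error;

// Invented error type; only the multi-line `#[error(...)]` layout mirrors the patch.
#[derive(Error, Debug)]
enum ManifestCheckError {
    #[error(
        "Unexpected field `{field}` in {path}. Remove it or move it under the `[package]` section"
    )]
    UnexpectedField { field: String, path: String },
}

fn main() {
    let err = ManifestCheckError::UnexpectedField {
        field: "authors".to_string(),
        path: "Nargo.toml".to_string(),
    };
    // Prints the message exactly as it would have printed before the re-wrap.
    println!("{err}");
}
```
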
diff --git a/compiler/noirc_frontend/src/parser/parser/where_clause.rs b/compiler/noirc_frontend/src/parser/parser/where_clause.rs index 09e2f106481..97cf8854cc1 100644 --- a/compiler/noirc_frontend/src/parser/parser/where_clause.rs +++ b/compiler/noirc_frontend/src/parser/parser/where_clause.rs @@ -9,7 +9,7 @@ use super::{ Parser, }; -impl<'a> Parser<'a> { +impl Parser<'_> { /// WhereClause = 'where' WhereClauseItems? /// /// WhereClauseItems = WhereClauseItem ( ',' WhereClauseItem )* ','? diff --git a/rust-toolchain.toml b/rust-toolchain.toml index e647d5cbf46..ed1915d2613 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.75.0" +channel = "1.85.0" components = [ "rust-src" ] -targets = [ "wasm32-unknown-unknown", "wasm32-wasi", "aarch64-apple-darwin" ] +targets = [ "wasm32-unknown-unknown", "wasm32-wasip1", "aarch64-apple-darwin" ] profile = "default" diff --git a/tooling/debugger/src/context.rs b/tooling/debugger/src/context.rs index 254b79c7eb4..1dbe9d11eee 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -794,11 +794,11 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } pub(super) fn get_variables(&self) -> Vec> { - return self.foreign_call_executor.get_variables(); + self.foreign_call_executor.get_variables() } pub(super) fn current_stack_frame(&self) -> Option> { - return self.foreign_call_executor.current_stack_frame(); + self.foreign_call_executor.current_stack_frame() } fn breakpoint_reached(&self) -> bool { diff --git a/tooling/lsp/src/attribute_reference_finder.rs b/tooling/lsp/src/attribute_reference_finder.rs index cd0a39b9748..f8b4b823bb5 100644 --- a/tooling/lsp/src/attribute_reference_finder.rs +++ b/tooling/lsp/src/attribute_reference_finder.rs @@ -65,7 +65,7 @@ impl<'a> AttributeReferenceFinder<'a> { } } -impl<'a> Visitor for AttributeReferenceFinder<'a> { +impl Visitor for AttributeReferenceFinder<'_> { fn visit_parsed_submodule(&mut self, parsed_sub_module: &ParsedSubModule, _span: Span) -> bool { // Switch `self.module_id` to the submodule let previous_module_id = self.module_id; diff --git a/tooling/lsp/src/requests/code_action.rs b/tooling/lsp/src/requests/code_action.rs index c3844112d5c..a75acfe6d50 100644 --- a/tooling/lsp/src/requests/code_action.rs +++ b/tooling/lsp/src/requests/code_action.rs @@ -233,7 +233,7 @@ impl<'a> CodeActionFinder<'a> { } } -impl<'a> Visitor for CodeActionFinder<'a> { +impl Visitor for CodeActionFinder<'_> { fn visit_item(&mut self, item: &Item) -> bool { if let ItemKind::Import(use_tree, _) = &item.kind { if let Some(lsp_location) = to_lsp_location(self.files, self.file, item.location.span) { diff --git a/tooling/lsp/src/requests/code_action/fill_struct_fields.rs b/tooling/lsp/src/requests/code_action/fill_struct_fields.rs index 7552a96f42d..0f9188388ee 100644 --- a/tooling/lsp/src/requests/code_action/fill_struct_fields.rs +++ b/tooling/lsp/src/requests/code_action/fill_struct_fields.rs @@ -9,7 +9,7 @@ use crate::byte_span_to_range; use super::CodeActionFinder; -impl<'a> CodeActionFinder<'a> { +impl CodeActionFinder<'_> { pub(super) fn fill_struct_fields(&mut self, constructor: &ConstructorExpression, span: Span) { if !self.includes_span(span) { return; diff --git a/tooling/lsp/src/requests/code_action/implement_missing_members.rs b/tooling/lsp/src/requests/code_action/implement_missing_members.rs index c29caf79848..a39df735b34 100644 --- a/tooling/lsp/src/requests/code_action/implement_missing_members.rs +++ 
b/tooling/lsp/src/requests/code_action/implement_missing_members.rs @@ -11,7 +11,7 @@ use crate::{byte_span_to_range, trait_impl_method_stub_generator::TraitImplMetho use super::CodeActionFinder; -impl<'a> CodeActionFinder<'a> { +impl CodeActionFinder<'_> { pub(super) fn implement_missing_members( &mut self, noir_trait_impl: &NoirTraitImpl, diff --git a/tooling/lsp/src/requests/code_action/import_or_qualify.rs b/tooling/lsp/src/requests/code_action/import_or_qualify.rs index 1141aca23d2..7859b08f5e7 100644 --- a/tooling/lsp/src/requests/code_action/import_or_qualify.rs +++ b/tooling/lsp/src/requests/code_action/import_or_qualify.rs @@ -12,7 +12,7 @@ use crate::{ use super::CodeActionFinder; -impl<'a> CodeActionFinder<'a> { +impl CodeActionFinder<'_> { pub(super) fn import_or_qualify(&mut self, path: &Path) { if path.segments.len() != 1 { return; diff --git a/tooling/lsp/src/requests/code_action/import_trait.rs b/tooling/lsp/src/requests/code_action/import_trait.rs index 8e772063aa3..56976c119cd 100644 --- a/tooling/lsp/src/requests/code_action/import_trait.rs +++ b/tooling/lsp/src/requests/code_action/import_trait.rs @@ -17,7 +17,7 @@ use crate::{ use super::CodeActionFinder; -impl<'a> CodeActionFinder<'a> { +impl CodeActionFinder<'_> { pub(super) fn import_trait_in_method_call(&mut self, method_call: &MethodCallExpression) { // First see if the method name already points to a function. let name_location = Location::new(method_call.method_name.span(), self.file); diff --git a/tooling/lsp/src/requests/code_action/remove_bang_from_call.rs b/tooling/lsp/src/requests/code_action/remove_bang_from_call.rs index 90f4fef0efd..8f6fed76cac 100644 --- a/tooling/lsp/src/requests/code_action/remove_bang_from_call.rs +++ b/tooling/lsp/src/requests/code_action/remove_bang_from_call.rs @@ -6,7 +6,7 @@ use crate::byte_span_to_range; use super::CodeActionFinder; -impl<'a> CodeActionFinder<'a> { +impl CodeActionFinder<'_> { pub(super) fn remove_bang_from_call(&mut self, span: Span) { // If we can't find the referenced function, there's nothing we can do let Some(ReferenceId::Function(func_id)) = diff --git a/tooling/lsp/src/requests/code_action/remove_unused_import.rs b/tooling/lsp/src/requests/code_action/remove_unused_import.rs index d64be66d75a..b9837eca65d 100644 --- a/tooling/lsp/src/requests/code_action/remove_unused_import.rs +++ b/tooling/lsp/src/requests/code_action/remove_unused_import.rs @@ -14,7 +14,7 @@ use crate::byte_span_to_range; use super::CodeActionFinder; -impl<'a> CodeActionFinder<'a> { +impl CodeActionFinder<'_> { pub(super) fn remove_unused_import( &mut self, use_tree: &UseTree, diff --git a/tooling/lsp/src/requests/completion.rs b/tooling/lsp/src/requests/completion.rs index a2a7c14f4c1..622293b7093 100644 --- a/tooling/lsp/src/requests/completion.rs +++ b/tooling/lsp/src/requests/completion.rs @@ -1202,7 +1202,7 @@ impl<'a> NodeFinder<'a> { } } -impl<'a> Visitor for NodeFinder<'a> { +impl Visitor for NodeFinder<'_> { fn visit_item(&mut self, item: &Item) -> bool { if let ItemKind::Import(use_tree, _) = &item.kind { if let Some(lsp_location) = to_lsp_location(self.files, self.file, item.location.span) { diff --git a/tooling/lsp/src/requests/completion/auto_import.rs b/tooling/lsp/src/requests/completion/auto_import.rs index 08d155f333c..acd77a63e48 100644 --- a/tooling/lsp/src/requests/completion/auto_import.rs +++ b/tooling/lsp/src/requests/completion/auto_import.rs @@ -14,7 +14,7 @@ use super::{ NodeFinder, }; -impl<'a> NodeFinder<'a> { +impl NodeFinder<'_> { pub(super) fn 
complete_auto_imports( &mut self, prefix: &str, diff --git a/tooling/lsp/src/requests/completion/builtins.rs b/tooling/lsp/src/requests/completion/builtins.rs index 09ffcc14e39..c8093d50c6a 100644 --- a/tooling/lsp/src/requests/completion/builtins.rs +++ b/tooling/lsp/src/requests/completion/builtins.rs @@ -11,7 +11,7 @@ use super::{ name_matches, NodeFinder, }; -impl<'a> NodeFinder<'a> { +impl NodeFinder<'_> { pub(super) fn builtin_functions_completion( &mut self, prefix: &str, diff --git a/tooling/lsp/src/requests/completion/completion_items.rs b/tooling/lsp/src/requests/completion/completion_items.rs index cfd11bfe1ad..394d5e9a46f 100644 --- a/tooling/lsp/src/requests/completion/completion_items.rs +++ b/tooling/lsp/src/requests/completion/completion_items.rs @@ -25,7 +25,7 @@ use super::{ FunctionCompletionKind, FunctionKind, NodeFinder, RequestedItems, TraitReexport, }; -impl<'a> NodeFinder<'a> { +impl NodeFinder<'_> { pub(super) fn module_def_id_completion_items( &self, module_def_id: ModuleDefId, @@ -44,7 +44,7 @@ impl<'a> NodeFinder<'a> { }, RequestedItems::OnlyTraits => match module_def_id { ModuleDefId::FunctionId(_) | ModuleDefId::GlobalId(_) | ModuleDefId::TypeId(_) => { - return Vec::new() + return Vec::new(); } ModuleDefId::ModuleId(_) | ModuleDefId::TypeAliasId(_) diff --git a/tooling/lsp/src/requests/document_symbol.rs b/tooling/lsp/src/requests/document_symbol.rs index 33a7d114931..3ed4df380b2 100644 --- a/tooling/lsp/src/requests/document_symbol.rs +++ b/tooling/lsp/src/requests/document_symbol.rs @@ -143,7 +143,7 @@ impl<'a> DocumentSymbolCollector<'a> { } } -impl<'a> Visitor for DocumentSymbolCollector<'a> { +impl Visitor for DocumentSymbolCollector<'_> { fn visit_noir_function(&mut self, noir_function: &NoirFunction, span: Span) -> bool { if noir_function.def.name.0.contents.is_empty() { return false; diff --git a/tooling/lsp/src/requests/hover/from_reference.rs b/tooling/lsp/src/requests/hover/from_reference.rs index 62a137c9232..ab9c6f0e58a 100644 --- a/tooling/lsp/src/requests/hover/from_reference.rs +++ b/tooling/lsp/src/requests/hover/from_reference.rs @@ -781,7 +781,7 @@ struct TypeLinksGatherer<'a> { links: Vec, } -impl<'a> TypeLinksGatherer<'a> { +impl TypeLinksGatherer<'_> { fn gather_type_links(&mut self, typ: &Type) { match typ { Type::Array(typ, _) => self.gather_type_links(typ), diff --git a/tooling/lsp/src/requests/hover/from_visitor.rs b/tooling/lsp/src/requests/hover/from_visitor.rs index 97571f561c7..595cd255796 100644 --- a/tooling/lsp/src/requests/hover/from_visitor.rs +++ b/tooling/lsp/src/requests/hover/from_visitor.rs @@ -51,7 +51,7 @@ impl<'a> HoverFinder<'a> { } } -impl<'a> Visitor for HoverFinder<'a> { +impl Visitor for HoverFinder<'_> { fn visit_literal_integer(&mut self, value: SignedField, span: Span) { if !self.intersects_span(span) { return; @@ -78,7 +78,9 @@ fn format_integer(typ: Type, value: SignedField) -> String { let value_big_int = BigInt::from_str(&value_base_10).unwrap(); let negative = if value.is_negative { "-" } else { "" }; - format!(" {typ}\n---\nvalue of literal: `{negative}{value_base_10} ({negative}0x{value_big_int:02x})`") + format!( + " {typ}\n---\nvalue of literal: `{negative}{value_base_10} ({negative}0x{value_big_int:02x})`" + ) } #[cfg(test)] diff --git a/tooling/lsp/src/requests/inlay_hint.rs b/tooling/lsp/src/requests/inlay_hint.rs index 704e70c4353..c0af56d0340 100644 --- a/tooling/lsp/src/requests/inlay_hint.rs +++ b/tooling/lsp/src/requests/inlay_hint.rs @@ -302,7 +302,7 @@ impl<'a> InlayHintCollector<'a> { } fn 
intersects_span(&self, other_span: Span) -> bool { - self.span.map_or(true, |span| span.intersects(&other_span)) + self.span.is_none_or(|span| span.intersects(&other_span)) } fn show_closing_brace_hint(&mut self, span: Span, f: F) @@ -320,7 +320,7 @@ impl<'a> InlayHintCollector<'a> { } } -impl<'a> Visitor for InlayHintCollector<'a> { +impl Visitor for InlayHintCollector<'_> { fn visit_item(&mut self, item: &Item) -> bool { self.intersects_span(item.location.span) } diff --git a/tooling/lsp/src/requests/rename.rs b/tooling/lsp/src/requests/rename.rs index 95dd6b506be..01181fc60f4 100644 --- a/tooling/lsp/src/requests/rename.rs +++ b/tooling/lsp/src/requests/rename.rs @@ -111,7 +111,10 @@ mod rename_tests { changes.iter().filter(|range| !ranges.contains(range)).collect(); let extra_in_ranges: Vec<_> = ranges.iter().filter(|range| !changes.contains(range)).collect(); - panic!("Rename locations did not match.\nThese renames were not found: {:?}\nThese renames should not have been found: {:?}", extra_in_ranges, extra_in_changes); + panic!( + "Rename locations did not match.\nThese renames were not found: {:?}\nThese renames should not have been found: {:?}", + extra_in_ranges, extra_in_changes + ); } assert_eq!(changes, ranges); } diff --git a/tooling/lsp/src/requests/signature_help.rs b/tooling/lsp/src/requests/signature_help.rs index 72684fd3eb2..ac15835ddac 100644 --- a/tooling/lsp/src/requests/signature_help.rs +++ b/tooling/lsp/src/requests/signature_help.rs @@ -332,7 +332,7 @@ impl<'a> SignatureFinder<'a> { } } -impl<'a> Visitor for SignatureFinder<'a> { +impl Visitor for SignatureFinder<'_> { fn visit_item(&mut self, item: &Item) -> bool { self.includes_span(item.location.span) } diff --git a/tooling/nargo/src/foreign_calls/default.rs b/tooling/nargo/src/foreign_calls/default.rs index 19928e89563..753c9b8b475 100644 --- a/tooling/nargo/src/foreign_calls/default.rs +++ b/tooling/nargo/src/foreign_calls/default.rs @@ -29,7 +29,7 @@ pub struct DefaultForeignCallBuilder<'a> { pub package_name: Option, } -impl<'a> Default for DefaultForeignCallBuilder<'a> { +impl Default for DefaultForeignCallBuilder<'_> { fn default() -> Self { Self { output: PrintOutput::default(), diff --git a/tooling/nargo/src/lib.rs b/tooling/nargo/src/lib.rs index 30f25356e41..f046f2d38d0 100644 --- a/tooling/nargo/src/lib.rs +++ b/tooling/nargo/src/lib.rs @@ -106,7 +106,7 @@ fn insert_all_files_for_package_into_file_manager( continue; } - if !entry.path().extension().map_or(false, |ext| ext == FILE_EXTENSION) { + if entry.path().extension().is_none_or(|ext| ext != FILE_EXTENSION) { continue; }; diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index bb08d2675cb..af07c2d5fcd 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -80,7 +80,7 @@ fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> n let noir_files_modified = debounced_events.iter().any(|event| { let mut event_paths = event.event.paths.iter(); let event_affects_noir_file = - event_paths.any(|path| path.extension().map_or(false, |ext| ext == "nr")); + event_paths.any(|path| path.extension().is_some_and(|ext| ext == "nr")); let is_relevant_event_kind = matches!( event.kind, diff --git a/tooling/nargo_cli/src/cli/dap_cmd.rs b/tooling/nargo_cli/src/cli/dap_cmd.rs index 91db844ead3..852e9f25574 100644 --- a/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -143,12 +143,7 @@ fn loop_uninitialized_dap( 
expression_width: ExpressionWidth, pedantic_solving: bool, ) -> Result<(), DapError> { - loop { - let req = match server.poll_request()? { - Some(req) => req, - None => break, - }; - + while let Some(req) = server.poll_request()? { match req.command { Command::Initialize(_) => { let rsp = req.success(ResponseBody::Initialize(Capabilities { diff --git a/tooling/nargo_cli/src/cli/fmt_cmd.rs b/tooling/nargo_cli/src/cli/fmt_cmd.rs index 1cdfb1e0c4f..b16ce9d1f7d 100644 --- a/tooling/nargo_cli/src/cli/fmt_cmd.rs +++ b/tooling/nargo_cli/src/cli/fmt_cmd.rs @@ -119,7 +119,7 @@ fn visit_noir_files( let path = entry.path(); if path.is_dir() { visit_noir_files(&path, cb)?; - } else if entry.path().extension().map_or(false, |extension| extension == "nr") { + } else if entry.path().extension().is_some_and(|extension| extension == "nr") { cb(&entry)?; } } diff --git a/tooling/nargo_cli/src/cli/generate_completion_script_cmd.rs b/tooling/nargo_cli/src/cli/generate_completion_script_cmd.rs index 0b8fa1ee3e7..5f7329e14ba 100644 --- a/tooling/nargo_cli/src/cli/generate_completion_script_cmd.rs +++ b/tooling/nargo_cli/src/cli/generate_completion_script_cmd.rs @@ -23,7 +23,7 @@ pub(crate) fn run(command: GenerateCompletionScriptCommand) -> Result<(), CliErr return Err(CliError::Generic( "Invalid shell. Supported shells are: bash, elvish, fish, powershell, zsh" .to_string(), - )) + )); } }; diff --git a/tooling/nargo_cli/src/cli/mod.rs b/tooling/nargo_cli/src/cli/mod.rs index 1d59fdb806f..265b3b58524 100644 --- a/tooling/nargo_cli/src/cli/mod.rs +++ b/tooling/nargo_cli/src/cli/mod.rs @@ -214,13 +214,11 @@ where /// The lock taken can be shared for commands that only read the artifacts, /// or exclusive for the ones that (might) write artifacts as well. fn lock_workspace(workspace: &Workspace, exclusive: bool) -> Result, CliError> { - use fs2::FileExt as _; - struct LockedFile(File); impl Drop for LockedFile { fn drop(&mut self) { - let _ = self.0.unlock(); + let _ = fs2::FileExt::unlock(&self.0); } } @@ -233,15 +231,17 @@ fn lock_workspace(workspace: &Workspace, exclusive: bool) -> Result TestRunner<'a> { // Specify a larger-than-default stack size to prevent overflowing stack in large programs. // (the default is 2MB) .stack_size(STACK_SIZE) - .spawn_scoped(scope, move || loop { - // Get next test to process from the iterator. - let Some(test) = iter.lock().unwrap().next() else { - break; - }; - - self.formatter - .test_start_async(&test.name, &test.package_name) - .expect("Could not display test start"); - - let time_before_test = std::time::Instant::now(); - let (status, output) = match catch_unwind(test.runner) { - Ok((status, output)) => (status, output), - Err(err) => ( - TestStatus::Fail { + .spawn_scoped(scope, move || { + loop { + // Get next test to process from the iterator. 
+ let Some(test) = iter.lock().unwrap().next() else { + break; + }; + + self.formatter + .test_start_async(&test.name, &test.package_name) + .expect("Could not display test start"); + + let time_before_test = std::time::Instant::now(); + let (status, output) = match catch_unwind(test.runner) { + Ok((status, output)) => (status, output), + Err(err) => ( + TestStatus::Fail { message: // It seems `panic!("...")` makes the error be `&str`, so we handle this common case if let Some(message) = err.downcast_ref::<&str>() { @@ -285,31 +286,32 @@ impl<'a> TestRunner<'a> { }, error_diagnostic: None, }, - String::new(), - ), - }; - let time_to_run = time_before_test.elapsed(); - - let test_result = TestResult { - name: test.name, - package_name: test.package_name, - status, - output, - time_to_run, - }; - - self.formatter - .test_end_async( - &test_result, - self.file_manager, - self.args.show_output, - self.args.compile_options.deny_warnings, - self.args.compile_options.silence_warnings, - ) - .expect("Could not display test start"); - - if thread_sender.send(test_result).is_err() { - break; + String::new(), + ), + }; + let time_to_run = time_before_test.elapsed(); + + let test_result = TestResult { + name: test.name, + package_name: test.package_name, + status, + output, + time_to_run, + }; + + self.formatter + .test_end_async( + &test_result, + self.file_manager, + self.args.show_output, + self.args.compile_options.deny_warnings, + self.args.compile_options.silence_warnings, + ) + .expect("Could not display test start"); + + if thread_sender.send(test_result).is_err() { + break; + } } }) .unwrap(); @@ -407,19 +409,21 @@ impl<'a> TestRunner<'a> { // Specify a larger-than-default stack size to prevent overflowing stack in large programs. // (the default is 2MB) .stack_size(STACK_SIZE) - .spawn_scoped(scope, move || loop { - // Get next package to process from the iterator. - let Some(package) = iter.lock().unwrap().next() else { - break; - }; - let tests = self.collect_package_tests::( - package, - self.args.oracle_resolver.as_deref(), - Some(self.workspace.root_dir.clone()), - package.name.to_string(), - ); - if thread_sender.send((package, tests)).is_err() { - break; + .spawn_scoped(scope, move || { + loop { + // Get next package to process from the iterator. + let Some(package) = iter.lock().unwrap().next() else { + break; + }; + let tests = self.collect_package_tests::( + package, + self.args.oracle_resolver.as_deref(), + Some(self.workspace.root_dir.clone()), + package.name.to_string(), + ); + if thread_sender.send((package, tests)).is_err() { + break; + } } }) .unwrap(); diff --git a/tooling/nargo_fmt/src/chunks.rs b/tooling/nargo_fmt/src/chunks.rs index fcef261284d..facf23f0ff1 100644 --- a/tooling/nargo_fmt/src/chunks.rs +++ b/tooling/nargo_fmt/src/chunks.rs @@ -546,7 +546,7 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { /// Treating a `ChunkFormatter` as a `Formatter` in read-only mode is always fine, /// and reduces some boilerplate. 
-impl<'a, 'b> Deref for ChunkFormatter<'a, 'b> { +impl<'b> Deref for ChunkFormatter<'_, 'b> { type Target = Formatter<'b>; fn deref(&self) -> &Self::Target { diff --git a/tooling/nargo_fmt/src/formatter/alias.rs b/tooling/nargo_fmt/src/formatter/alias.rs index d4c63ebdd9e..96eedfd3d88 100644 --- a/tooling/nargo_fmt/src/formatter/alias.rs +++ b/tooling/nargo_fmt/src/formatter/alias.rs @@ -5,7 +5,7 @@ use noirc_frontend::{ use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_type_alias(&mut self, type_alias: NoirTypeAlias) { self.write_indentation(); self.format_item_visibility(type_alias.visibility); diff --git a/tooling/nargo_fmt/src/formatter/attribute.rs b/tooling/nargo_fmt/src/formatter/attribute.rs index 19d5730a546..d23a788eae4 100644 --- a/tooling/nargo_fmt/src/formatter/attribute.rs +++ b/tooling/nargo_fmt/src/formatter/attribute.rs @@ -6,7 +6,7 @@ use crate::chunks::ChunkGroup; use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_attributes(&mut self, attributes: Attributes) { let mut all_attributes = Vec::new(); for attribute in attributes.secondary { diff --git a/tooling/nargo_fmt/src/formatter/comments_and_whitespace.rs b/tooling/nargo_fmt/src/formatter/comments_and_whitespace.rs index 6a3af34345c..da988e7039e 100644 --- a/tooling/nargo_fmt/src/formatter/comments_and_whitespace.rs +++ b/tooling/nargo_fmt/src/formatter/comments_and_whitespace.rs @@ -7,7 +7,7 @@ const NEWLINE: &str = "\r\n"; #[cfg(not(windows))] const NEWLINE: &str = "\n"; -impl<'a> Formatter<'a> { +impl Formatter<'_> { /// Writes a single space, skipping any whitespace and comments. /// That is, suppose the next token is a big whitespace, possibly with multiple lines. /// Those are skipped but only one space is written. 
In this way if we have diff --git a/tooling/nargo_fmt/src/formatter/doc_comments.rs b/tooling/nargo_fmt/src/formatter/doc_comments.rs index f591f09e729..6d25d7688d0 100644 --- a/tooling/nargo_fmt/src/formatter/doc_comments.rs +++ b/tooling/nargo_fmt/src/formatter/doc_comments.rs @@ -2,7 +2,7 @@ use noirc_frontend::token::{DocStyle, Token}; use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_inner_doc_comments(&mut self) { loop { self.skip_comments_and_whitespace(); diff --git a/tooling/nargo_fmt/src/formatter/enums.rs b/tooling/nargo_fmt/src/formatter/enums.rs index 2d1182a941c..beabf11fa46 100644 --- a/tooling/nargo_fmt/src/formatter/enums.rs +++ b/tooling/nargo_fmt/src/formatter/enums.rs @@ -6,7 +6,7 @@ use noirc_frontend::{ use super::Formatter; use crate::chunks::ChunkGroup; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_enum(&mut self, noir_enum: NoirEnumeration) { self.format_secondary_attributes(noir_enum.attributes); self.write_indentation(); diff --git a/tooling/nargo_fmt/src/formatter/expression.rs b/tooling/nargo_fmt/src/formatter/expression.rs index 56c4374dd77..b83c06724e4 100644 --- a/tooling/nargo_fmt/src/formatter/expression.rs +++ b/tooling/nargo_fmt/src/formatter/expression.rs @@ -19,7 +19,7 @@ struct FormattedLambda { first_line_width: usize, } -impl<'a, 'b> ChunkFormatter<'a, 'b> { +impl ChunkFormatter<'_, '_> { pub(super) fn format_expression(&mut self, expression: Expression, group: &mut ChunkGroup) { group.leading_comment(self.chunk(|formatter| { // Doc comments for an expression could come before a potential non-doc comment @@ -1167,7 +1167,9 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { ConstrainKind::Assert => Keyword::Assert, ConstrainKind::AssertEq => Keyword::AssertEq, ConstrainKind::Constrain => { - unreachable!("constrain always produces an error, and the formatter doesn't run when there are errors") + unreachable!( + "constrain always produces an error, and the formatter doesn't run when there are errors" + ) } }; @@ -1320,7 +1322,7 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { } } -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_empty_block_contents(&mut self) { if let Some(chunks) = self.chunk_formatter().empty_block_contents_chunk() { self.format_chunk_group(chunks); diff --git a/tooling/nargo_fmt/src/formatter/function.rs b/tooling/nargo_fmt/src/formatter/function.rs index f39110d4cb9..f37683286d3 100644 --- a/tooling/nargo_fmt/src/formatter/function.rs +++ b/tooling/nargo_fmt/src/formatter/function.rs @@ -22,7 +22,7 @@ pub(super) struct FunctionToFormat { pub(super) skip_visibility: bool, } -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_function(&mut self, func: NoirFunction, skip_visibility: bool) { self.format_function_impl(FunctionToFormat { attributes: func.def.attributes, diff --git a/tooling/nargo_fmt/src/formatter/generics.rs b/tooling/nargo_fmt/src/formatter/generics.rs index 4ee5a743942..c457f4976d5 100644 --- a/tooling/nargo_fmt/src/formatter/generics.rs +++ b/tooling/nargo_fmt/src/formatter/generics.rs @@ -5,7 +5,7 @@ use noirc_frontend::{ use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_generics(&mut self, generics: Vec) { self.skip_comments_and_whitespace(); diff --git a/tooling/nargo_fmt/src/formatter/global.rs b/tooling/nargo_fmt/src/formatter/global.rs index c351e15e3b6..4d6a43d0674 100644 --- a/tooling/nargo_fmt/src/formatter/global.rs +++ b/tooling/nargo_fmt/src/formatter/global.rs @@ -6,7 +6,7 @@ use 
noirc_frontend::{ use super::Formatter; use crate::chunks::{ChunkFormatter, ChunkGroup}; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_global( &mut self, let_statement: LetStatement, @@ -20,7 +20,7 @@ impl<'a> Formatter<'a> { } } -impl<'a, 'b> ChunkFormatter<'a, 'b> { +impl ChunkFormatter<'_, '_> { pub(super) fn format_global( &mut self, let_statement: LetStatement, diff --git a/tooling/nargo_fmt/src/formatter/impls.rs b/tooling/nargo_fmt/src/formatter/impls.rs index 71548dd5efa..b58b9381d17 100644 --- a/tooling/nargo_fmt/src/formatter/impls.rs +++ b/tooling/nargo_fmt/src/formatter/impls.rs @@ -2,7 +2,7 @@ use noirc_frontend::{ast::TypeImpl, token::Keyword}; use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_impl(&mut self, type_impl: TypeImpl) { let has_where_clause = !type_impl.where_clause.is_empty(); diff --git a/tooling/nargo_fmt/src/formatter/item.rs b/tooling/nargo_fmt/src/formatter/item.rs index 66afec5585f..fa07478cee1 100644 --- a/tooling/nargo_fmt/src/formatter/item.rs +++ b/tooling/nargo_fmt/src/formatter/item.rs @@ -8,7 +8,7 @@ use crate::config::ImportsGranularity; use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_items(&mut self, mut items: Vec, mut ignore_next: bool) { // Reverse the items because we'll be processing them one by one, and it's a bit // more efficient to pop than to shift. diff --git a/tooling/nargo_fmt/src/formatter/lvalue.rs b/tooling/nargo_fmt/src/formatter/lvalue.rs index a815cb24964..8dd1c76ab93 100644 --- a/tooling/nargo_fmt/src/formatter/lvalue.rs +++ b/tooling/nargo_fmt/src/formatter/lvalue.rs @@ -3,7 +3,7 @@ use noirc_frontend::{ast::LValue, token::Token}; use super::Formatter; use crate::chunks::ChunkGroup; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_lvalue(&mut self, lvalue: LValue) { // Parenthesized l-values exist but are not represented in the AST while let Token::LeftParen = self.token { diff --git a/tooling/nargo_fmt/src/formatter/module.rs b/tooling/nargo_fmt/src/formatter/module.rs index e07d22c7586..6bf4e30786d 100644 --- a/tooling/nargo_fmt/src/formatter/module.rs +++ b/tooling/nargo_fmt/src/formatter/module.rs @@ -4,7 +4,7 @@ use noirc_frontend::{ use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_module_declaration(&mut self, module_declaration: ModuleDeclaration) { self.format_secondary_attributes(module_declaration.outer_attributes); self.write_indentation(); diff --git a/tooling/nargo_fmt/src/formatter/path.rs b/tooling/nargo_fmt/src/formatter/path.rs index 2a46467bf72..f79bb72eff1 100644 --- a/tooling/nargo_fmt/src/formatter/path.rs +++ b/tooling/nargo_fmt/src/formatter/path.rs @@ -5,7 +5,7 @@ use noirc_frontend::{ use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_path(&mut self, path: Path) { self.skip_comments_and_whitespace(); diff --git a/tooling/nargo_fmt/src/formatter/pattern.rs b/tooling/nargo_fmt/src/formatter/pattern.rs index 9a76612109b..e8c6f9f1ec2 100644 --- a/tooling/nargo_fmt/src/formatter/pattern.rs +++ b/tooling/nargo_fmt/src/formatter/pattern.rs @@ -6,7 +6,7 @@ use noirc_frontend::{ use super::Formatter; use crate::chunks::ChunkGroup; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_pattern(&mut self, pattern: Pattern) { self.skip_comments_and_whitespace(); diff --git a/tooling/nargo_fmt/src/formatter/statement.rs b/tooling/nargo_fmt/src/formatter/statement.rs index 18b553c4534..d70778ae5d1 
100644 --- a/tooling/nargo_fmt/src/formatter/statement.rs +++ b/tooling/nargo_fmt/src/formatter/statement.rs @@ -8,7 +8,7 @@ use noirc_frontend::{ use crate::chunks::{ChunkFormatter, ChunkGroup, GroupKind}; -impl<'a, 'b> ChunkFormatter<'a, 'b> { +impl ChunkFormatter<'_, '_> { pub(super) fn format_statement( &mut self, statement: Statement, diff --git a/tooling/nargo_fmt/src/formatter/structs.rs b/tooling/nargo_fmt/src/formatter/structs.rs index c26ab552f30..9af10ff505c 100644 --- a/tooling/nargo_fmt/src/formatter/structs.rs +++ b/tooling/nargo_fmt/src/formatter/structs.rs @@ -6,7 +6,7 @@ use noirc_frontend::{ use super::Formatter; use crate::chunks::ChunkGroup; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_struct(&mut self, noir_struct: NoirStruct) { self.format_secondary_attributes(noir_struct.attributes); self.write_indentation(); diff --git a/tooling/nargo_fmt/src/formatter/trait_impl.rs b/tooling/nargo_fmt/src/formatter/trait_impl.rs index 896620c3bf8..9252082b26d 100644 --- a/tooling/nargo_fmt/src/formatter/trait_impl.rs +++ b/tooling/nargo_fmt/src/formatter/trait_impl.rs @@ -5,7 +5,7 @@ use noirc_frontend::{ use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_trait_impl(&mut self, trait_impl: NoirTraitImpl) { // skip synthetic trait impl's, e.g. generated from trait aliases if trait_impl.is_synthetic { diff --git a/tooling/nargo_fmt/src/formatter/traits.rs b/tooling/nargo_fmt/src/formatter/traits.rs index c40389f35e4..7381dee35d7 100644 --- a/tooling/nargo_fmt/src/formatter/traits.rs +++ b/tooling/nargo_fmt/src/formatter/traits.rs @@ -6,7 +6,7 @@ use noirc_frontend::{ use super::{function::FunctionToFormat, Formatter}; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_trait(&mut self, noir_trait: NoirTrait) { self.format_secondary_attributes(noir_trait.attributes); self.write_indentation(); diff --git a/tooling/nargo_fmt/src/formatter/type_expression.rs b/tooling/nargo_fmt/src/formatter/type_expression.rs index 95b0c045156..8bebfd42f0c 100644 --- a/tooling/nargo_fmt/src/formatter/type_expression.rs +++ b/tooling/nargo_fmt/src/formatter/type_expression.rs @@ -2,7 +2,7 @@ use noirc_frontend::{ast::UnresolvedTypeExpression, token::Token}; use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_type_expression(&mut self, type_expr: UnresolvedTypeExpression) { self.skip_comments_and_whitespace(); diff --git a/tooling/nargo_fmt/src/formatter/types.rs b/tooling/nargo_fmt/src/formatter/types.rs index ee7e9dc3a4f..6a0e66bc1f9 100644 --- a/tooling/nargo_fmt/src/formatter/types.rs +++ b/tooling/nargo_fmt/src/formatter/types.rs @@ -5,7 +5,7 @@ use noirc_frontend::{ use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_type(&mut self, typ: UnresolvedType) { self.skip_comments_and_whitespace(); diff --git a/tooling/nargo_fmt/src/formatter/use_tree.rs b/tooling/nargo_fmt/src/formatter/use_tree.rs index 98d63ef6611..5fd6aada664 100644 --- a/tooling/nargo_fmt/src/formatter/use_tree.rs +++ b/tooling/nargo_fmt/src/formatter/use_tree.rs @@ -7,7 +7,7 @@ use crate::chunks::{Chunk, ChunkFormatter, ChunkGroup}; use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_import(&mut self, use_tree: UseTree, visibility: ItemVisibility) { let group = self.chunk_formatter().format_import(use_tree, visibility); @@ -16,7 +16,7 @@ impl<'a> Formatter<'a> { } } -impl<'a, 'b> ChunkFormatter<'a, 'b> { +impl ChunkFormatter<'_, '_> { 
pub(super) fn format_import( &mut self, use_tree: UseTree, diff --git a/tooling/nargo_fmt/src/formatter/use_tree_merge.rs b/tooling/nargo_fmt/src/formatter/use_tree_merge.rs index a679e026435..ca6d64ea89b 100644 --- a/tooling/nargo_fmt/src/formatter/use_tree_merge.rs +++ b/tooling/nargo_fmt/src/formatter/use_tree_merge.rs @@ -9,7 +9,7 @@ use crate::{ use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn merge_and_format_imports( &mut self, imports: Vec, diff --git a/tooling/nargo_fmt/src/formatter/visibility.rs b/tooling/nargo_fmt/src/formatter/visibility.rs index 27441b977bb..2c2279ecb48 100644 --- a/tooling/nargo_fmt/src/formatter/visibility.rs +++ b/tooling/nargo_fmt/src/formatter/visibility.rs @@ -4,7 +4,7 @@ use noirc_frontend::{ token::Keyword, }; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_item_visibility(&mut self, visibility: ItemVisibility) { self.skip_comments_and_whitespace(); diff --git a/tooling/nargo_fmt/src/formatter/where_clause.rs b/tooling/nargo_fmt/src/formatter/where_clause.rs index 927383895b3..c5ecb178bbb 100644 --- a/tooling/nargo_fmt/src/formatter/where_clause.rs +++ b/tooling/nargo_fmt/src/formatter/where_clause.rs @@ -5,7 +5,7 @@ use noirc_frontend::{ use super::Formatter; -impl<'a> Formatter<'a> { +impl Formatter<'_> { pub(super) fn format_where_clause( &mut self, constraints: Vec, diff --git a/tooling/nargo_toml/src/errors.rs b/tooling/nargo_toml/src/errors.rs index 7e1003d04f7..5aeb6a135f1 100644 --- a/tooling/nargo_toml/src/errors.rs +++ b/tooling/nargo_toml/src/errors.rs @@ -28,7 +28,9 @@ pub enum ManifestError { #[error("Nargo.toml is badly formed, could not parse.\n\n {0}")] MalformedFile(#[from] toml::de::Error), - #[error("Unexpected workspace definition found in {0}. If you're attempting to load this as a dependency, you may need to add a `directory` field to your `Nargo.toml` to show which package within the workspace to use")] + #[error( + "Unexpected workspace definition found in {0}. If you're attempting to load this as a dependency, you may need to add a `directory` field to your `Nargo.toml` to show which package within the workspace to use" + )] UnexpectedWorkspace(PathBuf), #[error("Cannot find file {entry} which was specified as the `entry` field in {toml}")] @@ -80,16 +82,24 @@ pub enum ManifestError { #[allow(clippy::enum_variant_names)] #[derive(Error, Debug, PartialEq, Eq, Clone)] pub enum SemverError { - #[error("Invalid value for `compiler_version` in package {package_name}. Requirements may only refer to full releases")] + #[error( + "Invalid value for `compiler_version` in package {package_name}. Requirements may only refer to full releases" + )] InvalidCompilerVersionRequirement { package_name: CrateName, required_compiler_version: String }, - #[error("Incompatible compiler version in package {package_name}. Required compiler version is {required_compiler_version} but the compiler version is {compiler_version_found}.\n Update the compiler_version field in Nargo.toml to >={required_compiler_version} or compile this project with version {required_compiler_version}")] + #[error( + "Incompatible compiler version in package {package_name}. 
Required compiler version is {required_compiler_version} but the compiler version is {compiler_version_found}.\n Update the compiler_version field in Nargo.toml to >={required_compiler_version} or compile this project with version {required_compiler_version}" + )] IncompatibleVersion { package_name: CrateName, required_compiler_version: String, compiler_version_found: String, }, - #[error("Could not parse the required compiler version for package {package_name} in Nargo.toml. Error: {error}")] + #[error( + "Could not parse the required compiler version for package {package_name} in Nargo.toml. Error: {error}" + )] CouldNotParseRequiredVersion { package_name: String, error: String }, - #[error("Could not parse the package version for package {package_name} in Nargo.toml. Error: {error}")] + #[error( + "Could not parse the package version for package {package_name} in Nargo.toml. Error: {error}" + )] CouldNotParsePackageVersion { package_name: String, error: String }, } diff --git a/tooling/nargo_toml/src/flock.rs b/tooling/nargo_toml/src/flock.rs index 031dbcff647..1433827c3fa 100644 --- a/tooling/nargo_toml/src/flock.rs +++ b/tooling/nargo_toml/src/flock.rs @@ -1,4 +1,3 @@ -use fs2::FileExt; use std::{ fs::{File, OpenOptions}, path::Path, @@ -14,11 +13,11 @@ impl FileLock { pub(crate) fn new(file_path: &Path, lock_name: &str) -> std::io::Result { std::fs::create_dir_all(file_path.parent().expect("can't create lock on filesystem root"))?; let file = OpenOptions::new().create(true).truncate(false).write(true).open(file_path)?; - if file.try_lock_exclusive().is_err() { + if fs2::FileExt::try_lock_exclusive(&file).is_err() { eprintln!("Waiting for lock on {lock_name}..."); } - file.lock_exclusive()?; + fs2::FileExt::lock_exclusive(&file)?; Ok(Self { file }) } @@ -26,7 +25,7 @@ impl FileLock { impl Drop for FileLock { fn drop(&mut self) { - if let Err(e) = self.file.unlock() { + if let Err(e) = fs2::FileExt::unlock(&self.file) { tracing::warn!("failed to release lock: {e:?}"); } } diff --git a/tooling/nargo_toml/src/lib.rs b/tooling/nargo_toml/src/lib.rs index edf26411cf5..3a4d5e2d9b5 100644 --- a/tooling/nargo_toml/src/lib.rs +++ b/tooling/nargo_toml/src/lib.rs @@ -190,7 +190,7 @@ impl PackageConfig { return Err(ManifestError::InvalidPackageType( root_dir.join("Nargo.toml"), invalid.to_string(), - )) + )); } None => return Err(ManifestError::MissingPackageType(root_dir.join("Nargo.toml"))), }; @@ -389,7 +389,7 @@ fn toml_to_workspace( let member = package_config.resolve_to_package(&nargo_toml.root_dir, &mut resolved)?; match &package_selection { PackageSelection::Selected(selected_name) if selected_name != &member.name => { - return Err(ManifestError::MissingSelectedPackage(member.name)) + return Err(ManifestError::MissingSelectedPackage(member.name)); } _ => Workspace { root_dir: nargo_toml.root_dir, @@ -649,7 +649,8 @@ mod tests { assert!( indent <= current_indent + 1, - "cannot increase indent by more than {INDENT_SIZE}; item = {item}, current_dir={}", current_dir.display() + "cannot increase indent by more than {INDENT_SIZE}; item = {item}, current_dir={}", + current_dir.display() ); // Go into the last created directory diff --git a/tooling/nargo_toml/src/semver.rs b/tooling/nargo_toml/src/semver.rs index ececa1b30dd..02a7488379b 100644 --- a/tooling/nargo_toml/src/semver.rs +++ b/tooling/nargo_toml/src/semver.rs @@ -37,7 +37,7 @@ fn semver_check_package(package: &Package, compiler_version: &Version) -> Result return Err(SemverError::CouldNotParseRequiredVersion { package_name: 
package.name.clone().into(), error: err.to_string(), - }) + }); } }; @@ -109,12 +109,16 @@ mod tests { expression_width: None, }; if let Err(err) = semver_check_package(&package, &compiler_version) { - panic!("semver check should have passed. compiler version is 0.1.0 and required version from the package is 0.1.0\n error: {err:?}") + panic!( + "semver check should have passed. compiler version is 0.1.0 and required version from the package is 0.1.0\n error: {err:?}" + ) }; package.compiler_required_version = Some("0.2.0".to_string()); let got_err = match semver_check_package(&package, &compiler_version) { - Ok(_) => panic!("semver check should have failed. compiler version is 0.1.0 and required version from the package is 0.2.0"), + Ok(_) => panic!( + "semver check should have failed. compiler version is 0.1.0 and required version from the package is 0.2.0" + ), Err(err) => err, }; @@ -168,15 +172,19 @@ mod tests { ); if let Err(err) = semver_check_package(&package, &compiler_version) { - panic!("semver check should have passed. compiler version is 0.1.0 and required version from the package is 0.1.0\n error: {err:?}") + panic!( + "semver check should have passed. compiler version is 0.1.0 and required version from the package is 0.1.0\n error: {err:?}" + ) }; package.dependencies.insert( CrateName::from_str("test_dep_invalid").unwrap(), Dependency::Local { package: invalid_dependency.clone() }, ); - let got_err = match semver_check_package(&package,&compiler_version) { - Ok(_) => panic!("semver check should have failed. compiler version is 0.1.0 and required version from the package is 0.2.0"), + let got_err = match semver_check_package(&package, &compiler_version) { + Ok(_) => panic!( + "semver check should have failed. compiler version is 0.1.0 and required version from the package is 0.2.0" + ), Err(err) => err, }; @@ -204,7 +212,9 @@ mod tests { }; if let Err(err) = semver_check_package(&package, &compiler_version) { - panic!("semver check should have passed. compiler version is 0.2.0 and required version from the package is >=0.1.0\n error: {err:?}") + panic!( + "semver check should have passed. compiler version is 0.2.0 and required version from the package is >=0.1.0\n error: {err:?}" + ) }; } @@ -244,7 +254,9 @@ mod tests { }; if let Err(err) = semver_check_package(&package, &compiler_version) { - panic!("semver check should have passed. compiler version is 0.1.0+build_data and required version from the package is 0.1.0\n The build data should be ignored\n error: {err:?}") + panic!( + "semver check should have passed. 
compiler version is 0.1.0+build_data and required version from the package is 0.1.0\n The build data should be ignored\n error: {err:?}" + ) }; } } diff --git a/tooling/noirc_abi/src/errors.rs b/tooling/noirc_abi/src/errors.rs index c46945d8ff2..3e19f5c7d82 100644 --- a/tooling/noirc_abi/src/errors.rs +++ b/tooling/noirc_abi/src/errors.rs @@ -13,9 +13,13 @@ pub enum InputParserError { "The value passed for parameter `{arg_name}` is invalid:\nExpected witness values to be integers, but `{value}` failed with `{error}`" )] ParseStr { arg_name: String, value: String, error: String }, - #[error("The value passed for parameter `{arg_name}` is invalid:\nValue {value} is less than minimum allowed value of {min}")] + #[error( + "The value passed for parameter `{arg_name}` is invalid:\nValue {value} is less than minimum allowed value of {min}" + )] InputUnderflowsMinimum { arg_name: String, value: String, min: String }, - #[error("The value passed for parameter `{arg_name}` is invalid:\nValue {value} exceeds maximum allowed value of {max}")] + #[error( + "The value passed for parameter `{arg_name}` is invalid:\nValue {value} exceeds maximum allowed value of {max}" + )] InputOverflowsMaximum { arg_name: String, value: String, max: String }, #[error( "The value passed for parameter `{arg_name}` is invalid:\nValue {value} exceeds field modulus. Values must fall within [0, {})", @@ -58,9 +62,13 @@ pub enum AbiError { "Could not read witness value at index {witness_index:?} (required for parameter \"{name}\")" )] MissingParamWitnessValue { name: String, witness_index: Witness }, - #[error("Attempted to write to witness index {0:?} but it is already initialized to a different value")] + #[error( + "Attempted to write to witness index {0:?} but it is already initialized to a different value" + )] InconsistentWitnessAssignment(Witness), - #[error("The return value is expected to be a {return_type:?} but found incompatible value {value:?}")] + #[error( + "The return value is expected to be a {return_type:?} but found incompatible value {value:?}" + )] ReturnTypeMismatch { return_type: AbiType, value: InputValue }, #[error("No return value is expected but received {0:?}")] UnexpectedReturnValue(InputValue), diff --git a/tooling/noirc_abi/src/input_parser/mod.rs b/tooling/noirc_abi/src/input_parser/mod.rs index 37b535e5d0a..8e90832f9fc 100644 --- a/tooling/noirc_abi/src/input_parser/mod.rs +++ b/tooling/noirc_abi/src/input_parser/mod.rs @@ -27,7 +27,9 @@ pub enum InputValue { pub enum InputTypecheckingError { #[error("Value {value:?} does not fall within range of allowable values for a {typ:?}")] OutsideOfValidRange { path: String, typ: AbiType, value: InputValue }, - #[error("Type {typ:?} is expected to have length {expected_length} but value {value:?} has length {actual_length}")] + #[error( + "Type {typ:?} is expected to have length {expected_length} but value {value:?} has length {actual_length}" + )] LengthMismatch { path: String, typ: AbiType, @@ -35,9 +37,13 @@ pub enum InputTypecheckingError { expected_length: usize, actual_length: usize, }, - #[error("Could not find value for required field `{expected_field}`. Found values for fields {found_fields:?}")] + #[error( + "Could not find value for required field `{expected_field}`. Found values for fields {found_fields:?}" + )] MissingField { path: String, expected_field: String, found_fields: Vec }, - #[error("Additional unexpected field was provided for type {typ:?}. 
Found field named `{extra_field}`")] + #[error( + "Additional unexpected field was provided for type {typ:?}. Found field named `{extra_field}`" + )] UnexpectedField { path: String, typ: AbiType, extra_field: String }, #[error("Type {typ:?} and value {value:?} do not match")] IncompatibleTypes { path: String, typ: AbiType, value: InputValue }, diff --git a/tooling/noirc_abi/src/lib.rs b/tooling/noirc_abi/src/lib.rs index 5f5f3748bc4..99428849cbc 100644 --- a/tooling/noirc_abi/src/lib.rs +++ b/tooling/noirc_abi/src/lib.rs @@ -202,7 +202,7 @@ impl Abi { let has_public_return = self .return_type .as_ref() - .map_or(false, |typ| matches!(typ.visibility, AbiVisibility::Public)); + .is_some_and(|typ| matches!(typ.visibility, AbiVisibility::Public)); has_public_args || has_public_return } @@ -263,7 +263,7 @@ impl Abi { encoded_inputs.push(encoded_return_fields); } (None, Some(return_value)) => { - return Err(AbiError::UnexpectedReturnValue(return_value)) + return Err(AbiError::UnexpectedReturnValue(return_value)); } // We allow not passing a return value despite the circuit defining one // in order to generate the initial partial witness. diff --git a/tooling/profiler/src/cli/opcodes_flamegraph_cmd.rs b/tooling/profiler/src/cli/opcodes_flamegraph_cmd.rs index 4dace54b123..e882ee65075 100644 --- a/tooling/profiler/src/cli/opcodes_flamegraph_cmd.rs +++ b/tooling/profiler/src/cli/opcodes_flamegraph_cmd.rs @@ -142,7 +142,7 @@ fn locate_brillig_call( for (acir_opcode_index, acir_opcode) in acir_fn.opcodes.iter().enumerate() { match acir_opcode { Opcode::BrilligCall { id, .. } if id.as_usize() == brillig_fn_index => { - return Some((acir_fn_index, acir_opcode_index)) + return Some((acir_fn_index, acir_opcode_index)); } _ => {} }
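
Finally, the `nargo_cli` and `nargo_toml` locking code stops importing `fs2::FileExt` and instead writes every call fully qualified, e.g. `fs2::FileExt::lock_exclusive(&file)`. The likely motivation is that `std::fs::File` is gaining inherent locking methods with overlapping names (`unlock` in particular), and method-call syntax on a `File` can then trip the `unstable_name_collisions` warning or quietly change resolution; naming the trait pins the call either way. A hedged sketch of the pattern, with an invented helper and lock-file path, assuming the `fs2` crate as a dependency:

```rust
use std::fs::{File, OpenOptions};
use std::io;

// `acquire_exclusive` and "example.lock" are invented for this sketch; the
// fully-qualified `fs2::FileExt` calls mirror the patch.
fn acquire_exclusive(path: &str) -> io::Result<File> {
    let file = OpenOptions::new().create(true).truncate(false).write(true).open(path)?;
    if fs2::FileExt::try_lock_exclusive(&file).is_err() {
        eprintln!("Waiting for lock on {path}...");
    }
    // Blocks until the exclusive lock is held.
    fs2::FileExt::lock_exclusive(&file)?;
    Ok(file)
}

fn main() -> io::Result<()> {
    let file = acquire_exclusive("example.lock")?;
    // ... exclusive section ...
    // Spelling out the trait avoids any clash with an inherent `File::unlock`.
    fs2::FileExt::unlock(&file)?;
    Ok(())
}
```
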